ngram
listlengths
0
67.8k
[ "from datetime import datetime from pprint import pprint import ray from ray import", "int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda env:", "if len(sys.argv) >= 4 and sys.argv[-3] == 'ray': redis_password = sys.argv[-2] ray_num_cpus =", "DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment", "# lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): # # for worker", "# # def set_starting_distance(ego_starting_distance): # # for worker in trainer._workers: # # print(worker)", "reporter(**result) if i % checkpoint_frequency == 0: # checkpoint_path = trainer.logdir # checkpoint_path", "= make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker(", "= trainer.save() print('saved to checkpoint ', checkpoint_path) def on_episode_end(info): # print(info) episode =", "', checkpoint_path) def on_episode_end(info): # print(info) episode = info['episode'] # print(info) # trainer", "for worker in trainer._workers: # # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance)", "import ray from ray import tune from ray.rllib.agents import Trainer from ray.tune.logger import", "sys.argv) else: if not ray.is_initialized(): ray.init() print('setup config') config, run_prefix = setup_run(default_config) #", "= make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results'", "sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) 
sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray", "os import sys import tempfile from datetime import datetime from pprint import pprint", "'2' from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer", "lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): #", "from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config,", "ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools", "ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance =", "redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] # del", "ray.is_initialized(): ray.init() print('setup config') config, run_prefix = setup_run(default_config) # config, this_env = setup_environment_config(config)", "# # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in range(max_iters):", "i in range(max_iters): result = trainer.train() reporter(**result) if i % checkpoint_frequency == 0:", "= setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes())", "name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' # resources_per_trial={'gpu':1}, ) print('shutting down') ray.shutdown()", "and sys.argv[-3] == 'ray': redis_password = 
sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv", "'argv: ', sys.argv) else: if not ray.is_initialized(): ray.init() print('setup config') config, run_prefix =", "pprint import pprint import ray from ray import tune from ray.rllib.agents import Trainer", "ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL']", "# lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance):", "get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p',", "ego_starting_distance = 600.0 environment, trainer = make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency", "def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) #", "in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers'] =", "trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name'))", "# checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c',", "print(info) # trainer = info['trainer'] base_env = info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin", "def set_starting_distance(ego_starting_distance): # # for worker in trainer._workers: # # print(worker) # #", "trainer.es_co_trainer import ESCOTrainer 
os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance = 600.0 environment,", "get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config,", "_redis_password=redis_password) sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv:", "run_prefix = setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes in the Ray cluster:\")", "sys import tempfile from datetime import datetime from pprint import pprint import ray", "None if len(sys.argv) >= 4 and sys.argv[-3] == 'ray': redis_password = sys.argv[-2] ray_num_cpus", "UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run", "def on_episode_end(info): # print(info) episode = info['episode'] # print(info) # trainer = info['trainer']", "'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint ', checkpoint_path)", "# print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path)", "# trainer = info['trainer'] base_env = info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer')", "# set_starting_distance(ego_starting_distance) for i in range(max_iters): result = trainer.train() reporter(**result) if i %", "trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path)", "= '0' def train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer = make_environment_and_controller(None, rllib_config)", "# # # def set_starting_distance(ego_starting_distance): # # for worker 
in trainer._workers: # #", "checkpoint_path = trainer.save() print('saved to checkpoint ', checkpoint_path) def on_episode_end(info): # print(info) episode", "== 'ray': redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3]", "make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3) #", "env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): # # for worker in trainer._workers: #", "trainer.train() reporter(**result) if i % checkpoint_frequency == 0: # checkpoint_path = trainer.logdir #", "train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' # resources_per_trial={'gpu':1}, ) print('shutting down')", "if ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config = make_rllib_config(config)", "trainer = info['trainer'] base_env = info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config", "def train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer = make_environment_and_controller(None, rllib_config) # trainer", "set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # #", "to checkpoint ', checkpoint_path) def on_episode_end(info): # print(info) episode = info['episode'] # print(info)", "trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint ', checkpoint_path) def on_episode_end(info): # print(info)", "episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') 
default_config = common_default_config ray_num_cpus = None if len(sys.argv)", "import os import sys import tempfile from datetime import datetime from pprint import", "ray from ray import tune from ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger", "command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual", "ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): # #", "range(max_iters): result = trainer.train() reporter(**result) if i % checkpoint_frequency == 0: # checkpoint_path", "this_env = setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is", "= setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not", "print('setup config') config, run_prefix = setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes in", "from pprint import pprint import ray from ray import tune from ray.rllib.agents import", "from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run from", "# trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # # #", "1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env(", "= common_default_config ray_num_cpus = None if len(sys.argv) >= 4 and sys.argv[-3] == 'ray':", "for i in range(max_iters): result = trainer.train() reporter(**result) if i % checkpoint_frequency ==", "pprint(ray.nodes()) 
pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config", "episode = info['episode'] # print(info) # trainer = info['trainer'] base_env = info['env'] #", "checkpoint ', checkpoint_path) def on_episode_end(info): # print(info) episode = info['episode'] # print(info) #", "# checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint ', checkpoint_path) def", "print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' # resources_per_trial={'gpu':1},", "trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] =", "set_starting_distance(ego_starting_distance) for i in range(max_iters): result = trainer.train() reporter(**result) if i % checkpoint_frequency", "import datetime from pprint import pprint import ray from ray import tune from", "== 0: # checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) #", "not ray.is_initialized(): ray.init() print('setup config') config, run_prefix = setup_run(default_config) # config, this_env =", "import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0'", "worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in range(max_iters): result = trainer.train() reporter(**result) if", "= sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv)", "# # for worker in trainer._workers: # # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) #", "import UnifiedLogger from ray.tune.result 
import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import", "ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus,", "from ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from", "600.0 environment, trainer = make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency = 1", "trainer._workers: # # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in", "'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path", "make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( #", "ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer =", "tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' # resources_per_trial={'gpu':1}, )", "the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus", "'0' def train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer = make_environment_and_controller(None, rllib_config) #", "tune from ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR", "print('begin trainer') default_config = common_default_config ray_num_cpus = None if len(sys.argv) >= 4 and", "# for worker in trainer._workers: # # print(worker) # # 
worker.env.process.set_starting_distance(ego_starting_distance) # #", "= sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4]", "', sys.argv) else: if not ray.is_initialized(): ray.init() print('setup config') config, run_prefix = setup_run(default_config)", "os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), #", "checkpoint_frequency = 1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda", "in range(max_iters): result = trainer.train() reporter(**result) if i % checkpoint_frequency == 0: #", "cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus - 1", "= info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config ray_num_cpus =", "trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' # resources_per_trial={'gpu':1}, ) print('shutting down') ray.shutdown() print('done')", "trainer = make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): #", "trainer = make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency = 1 max_iters =", "# # set_starting_distance(ego_starting_distance) for i in range(max_iters): result = trainer.train() reporter(**result) if i", "checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config,", "import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer 
os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance", "# print(info) episode = info['episode'] # print(info) # trainer = info['trainer'] base_env =", "default_config = common_default_config ray_num_cpus = None if len(sys.argv) >= 4 and sys.argv[-3] ==", "# os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from", "len(sys.argv) >= 4 and sys.argv[-3] == 'ray': redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1])", "rllib_config) # trainer = make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3) # def", "# def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance)))", "from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from", "= trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config,", "# 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint ',", "0: # checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path", "= base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config ray_num_cpus = None if len(sys.argv) >=", "sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv) else:", "pprint import ray from ray import tune from ray.rllib.agents import Trainer from ray.tune.logger", "= os.path.join(checkpoint_path, get_setting(config, 
'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir,", "from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES']", "sys.argv[-3] == 'ray': redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv =", "datetime import datetime from pprint import pprint import ray from ray import tune", "tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' # resources_per_trial={'gpu':1}, ) print('shutting", "configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv) else: if not ray.is_initialized(): ray.init() print('setup", "import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer", "= os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'),", "= trainer.train() reporter(**result) if i % checkpoint_frequency == 0: # checkpoint_path = trainer.logdir", "print(info) episode = info['episode'] # print(info) # trainer = info['trainer'] base_env = info['env']", "config, run_prefix = setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes in the Ray", "import pprint import ray from ray import tune from ray.rllib.agents import Trainer from", "import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import", "base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config ray_num_cpus = 
None if len(sys.argv) >= 4", "tempfile from datetime import datetime from pprint import pprint import ray from ray", "print('saved to checkpoint ', checkpoint_path) def on_episode_end(info): # print(info) episode = info['episode'] #", "trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter):", "# # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in range(max_iters): result = trainer.train()", "None: config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config = make_rllib_config(config) print('running tune') tune.run( train,", "ray.init() print('setup config') config, run_prefix = setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes", "lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): # # for worker in", "'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint", "trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def", "= int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ',", "common_default_config ray_num_cpus = None if len(sys.argv) >= 4 and sys.argv[-3] == 'ray': redis_password", "worker in trainer._workers: # # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for", "# worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in range(max_iters): result = trainer.train() reporter(**result)", "# print(info) # trainer = info['trainer'] base_env = 
info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance", "# trainer = make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance):", "# del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv) else: if", "sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv) else: if not ray.is_initialized():", "config, this_env = setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus", "# checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path =", "# print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in range(max_iters): result", "'ray': redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] #", "= info['trainer'] base_env = info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config =", "result = trainer.train() reporter(**result) if i % checkpoint_frequency == 0: # checkpoint_path =", "info['trainer'] base_env = info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config", "checkpoint_frequency == 0: # checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment'))", "ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config = make_rllib_config(config) print('running", "= '2' from command_line_tools.run_tools import 
setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import", "from ray import tune from ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger from", "CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def", "- 1 rllib_config = make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'],", "'name'), 'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save()", "print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv) else: if not ray.is_initialized(): ray.init()", "os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:',", "setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources())", "get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path =", "int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ', redis_password,", "# config, this_env = setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if", "Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus -", "4 and sys.argv[-3] == 'ray': redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password)", 
"= make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency = 1 max_iters = int(100e3)", "reporter): ego_starting_distance = 600.0 environment, trainer = make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config)", "Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'", "ray_num_cpus = None if len(sys.argv) >= 4 and sys.argv[-3] == 'ray': redis_password =", "set_starting_distance(ego_starting_distance): # # for worker in trainer._workers: # # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance)", "if i % checkpoint_frequency == 0: # checkpoint_path = trainer.logdir # checkpoint_path =", "1 rllib_config = make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config,", "else: if not ray.is_initialized(): ray.init() print('setup config') config, run_prefix = setup_run(default_config) # config,", "config') config, run_prefix = setup_run(default_config) # config, this_env = setup_environment_config(config) print(\"Nodes in the", "if not ray.is_initialized(): ray.init() print('setup config') config, run_prefix = setup_run(default_config) # config, this_env", "max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( #", ">= 4 and sys.argv[-3] == 'ray': redis_password = sys.argv[-2] ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"],", "import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import", "env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): # # for worker in trainer._workers:", "', redis_password, 
ray_num_cpus, 'argv: ', sys.argv) else: if not ray.is_initialized(): ray.init() print('setup config')", "import Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] =", "# checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) #", "make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, # local_dir='~/ray_results' #", "ray_num_cpus = int(sys.argv[-1]) ray.init(address=os.environ[\"ip_head\"], _redis_password=redis_password) sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration:", "setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer", "get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to", "= info['episode'] # print(info) # trainer = info['trainer'] base_env = info['env'] # episode.custom_metrics['ego_starting_distance']", "sys.argv = sys.argv[0:-3] # del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ',", "scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer", "= int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev: ev.foreach_env( # lambda", "# def set_starting_distance(ego_starting_distance): # # for worker in trainer._workers: # # print(worker) #", "train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer = 
make_environment_and_controller(None, rllib_config) # trainer =", "setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from trainer.es_co_trainer import ESCOTrainer", "print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) #", "import sys import tempfile from datetime import datetime from pprint import pprint import", "trainer') default_config = common_default_config ray_num_cpus = None if len(sys.argv) >= 4 and sys.argv[-3]", "not None: config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config = make_rllib_config(config) print('running tune') tune.run(", "ray_num_cpus - 1 rllib_config = make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial:", "from ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result import DEFAULT_RESULTS_DIR #", "info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config ray_num_cpus = None", "= 600.0 environment, trainer = make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency =", "= None if len(sys.argv) >= 4 and sys.argv[-3] == 'ray': redis_password = sys.argv[-2]", "print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i in range(max_iters): result =", "os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance = 600.0 environment, trainer = make_environment_and_controller(None,", "setup_environment_config(config) print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not None:", "is not None: config['rllib']['num_workers'] = ray_num_cpus - 
1 rllib_config = make_rllib_config(config) print('running tune')", "in trainer._workers: # # print(worker) # # worker.env.process.set_starting_distance(ego_starting_distance) # # set_starting_distance(ego_starting_distance) for i", "% checkpoint_frequency == 0: # checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config,", "from trainer.es_co_trainer import ESCOTrainer os.environ['CUDA_VISIBLE_DEVICES'] = '0' def train(rllib_config, reporter): ego_starting_distance = 600.0", "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer", "# trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint ', checkpoint_path) def on_episode_end(info): #", "on_episode_end(info): # print(info) episode = info['episode'] # print(info) # trainer = info['trainer'] base_env", "ray_num_cpus, 'argv: ', sys.argv) else: if not ray.is_initialized(): ray.init() print('setup config') config, run_prefix", "ray.tune.result import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common", "ray import tune from ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result", "trainer.save() print('saved to checkpoint ', checkpoint_path) def on_episode_end(info): # print(info) episode = info['episode']", "checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:',", "environment, trainer = make_environment_and_controller(None, rllib_config) # trainer = make_trainer(config) checkpoint_frequency = 1 max_iters", "info['episode'] # print(info) # trainer = info['trainer'] base_env = info['env'] # 
episode.custom_metrics['ego_starting_distance'] =", "'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'), 'c', get_setting(config, 'checkpoint'), # 'p', #", "checkpoint_path) def on_episode_end(info): # print(info) episode = info['episode'] # print(info) # trainer =", "checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved to checkpoint ', checkpoint_path) def on_episode_end(info):", "del sys.argv[-1:-4] print('ray configuration: ', redis_password, ray_num_cpus, 'argv: ', sys.argv) else: if not", "pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config =", "= 1 max_iters = int(100e3) # def set_starting_distance(ego_starting_distance): # trainer.workers.foreach_worker( # lambda ev:", "# episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config ray_num_cpus = None if", "rllib_config = make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda trial: config['name'], config=rllib_config, #", "from scenario.trajectory_tracking.experiment.experiment_common import setup_environment from trainer.coordinated_dps_trainer import CoordinatedDPSTrainer from trainer.es_actual import ESActualTrainer from", "config['rllib']['num_workers'] = ray_num_cpus - 1 rllib_config = make_rllib_config(config) print('running tune') tune.run( train, name=config['name'],", "= ray_num_cpus - 1 rllib_config = make_rllib_config(config) print('running tune') tune.run( train, name=config['name'], trial_name_creator=lambda", "checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'experiment')) # checkpoint_path = os.path.join(checkpoint_path,", "import tempfile from datetime import datetime from pprint import pprint import ray from", "redis_password, ray_num_cpus, 
'argv: ', sys.argv) else: if not ray.is_initialized(): ray.init() print('setup config') config,", "base_env = info['env'] # episode.custom_metrics['ego_starting_distance'] = base_env.get_unwrapped()[0].process.ego_starting_distance print('begin trainer') default_config = common_default_config ray_num_cpus", "import tune from ray.rllib.agents import Trainer from ray.tune.logger import UnifiedLogger from ray.tune.result import", "datetime from pprint import pprint import ray from ray import tune from ray.rllib.agents", "import DEFAULT_RESULTS_DIR # os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' from command_line_tools.run_tools import setup_run from scenario.trajectory_tracking.experiment.experiment_common import", "'experiment')) # checkpoint_path = os.path.join(checkpoint_path, get_setting(config, 'name')) # print('ld:', trainer.logdir, 'n:', get_setting(config, 'name'),", "print(\"Nodes in the Ray cluster:\") pprint(ray.nodes()) pprint(ray.cluster_resources()) if ray_num_cpus is not None: config['rllib']['num_workers']", "ev.foreach_env( # lambda env: env.process.set_starting_distance(ego_starting_distance))) # # # def set_starting_distance(ego_starting_distance): # # for", "'c', get_setting(config, 'checkpoint'), # 'p', # checkpoint_path) # trainer.save(checkpoint_path) checkpoint_path = trainer.save() print('saved", "i % checkpoint_frequency == 0: # checkpoint_path = trainer.logdir # checkpoint_path = os.path.join(checkpoint_path," ]
[ "Unless required by applicable law or agreed to in writing, software # distributed", "emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from", "( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli,", "the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file) if", "for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings =", "import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, )", "Returns: location of the extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location =", "-1: raise Exception( \"Cannot record at a given distance from apical point\" f\"if", "Exception( \"No MainProtocol found, but {prot} was found.\" f\"To use {prot_name}, please set", "define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols", "of the extra recording definition Returns: list of RecordingCustom \"\"\" recordings = []", "raise Exception( \"Cannot record at a given distance from apical point\" f\"if apical_point_isec", "be given if there is \"somadistanceapic\" in \"type\" of at least one of", "if \"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec", "name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, 
thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp", "stochkv_det (bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all", "protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. Args: protocols_filename (str): path to", "be run Raises: Exception: If a protocol that should only be used with", "from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom,", "Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions", "protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols =", "ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating", "contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol", "dict containing the protocol data prefix (str): prefix used in naming responses, features,", "definition. 
Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol", "that should only be used with MainProtocol is present in protocols_dict \"\"\" #", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return", "step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus", "if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim random not set", "name of the protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording):", "stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], )", "prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict: efeatures = define_efeatures(", "be used as prefix in output filenames syn_locs (list): list of synapse locations", "to the features file mtype (str): morphology name to be used as prefix", "\"somadistanceapic\": if apical_point_isec == -1: raise Exception( \"Cannot record at a given distance", "prefix=prefix, ) other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict:", "any extra recordings prot_path (str): path to the protocols file features_path (str): path", 
"\"\"\"Define protocols. Args: protocols_filename (str): path to the protocols file stochkv_det (bool): set", "in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\",", "ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli,", "syn_locs): \"\"\"Read Netstim protocol from definitions. Args: protocol_name (str): name of the protocol", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "are not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] #", "dict containing the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions =", "protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol(", "(int): apical point section index Should be given if there is \"somadistanceapic\" in", "set if stochastic or deterministic Returns: StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable", "recording definition is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the", "RampThresholdProtocol: Ramp Protocol depending on cell's threshold current \"\"\" 
ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus", "use {prot_name}, please set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None,", "RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from", "/ EPFL # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype,", "read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli = [] for stimulus_definition in", "pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict)", "location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location", "protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use", "this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold current \"\"\" ramp_definition", "(RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix (str): prefix used in naming", "syn_locs, ) if \"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, )", 
"protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec )", "Copyright 2020-2021 Blue Brain Project / EPFL # Licensed under the Apache License,", "ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"],", "[step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"],", "path to the protocols file stochkv_det (bool): set if stochastic or deterministic prefix", "data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set if", "\"\"\"Read Vecstim protocol from definitions. 
Args: protocol_name (str): name of the protocol protocol_definition", "a protocol that should only be used with MainProtocol is present in protocols_dict", "step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return RampProtocol( name=protocol_name,", "\"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the extra recording \"\"\" if", "stochkv_det (bool): set if stochastic or deterministic Returns: StepProtocol: Step Protocol \"\"\" #", "to be run Raises: Exception: If a protocol that should only be used", "if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition in step_definitions:", "synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"],", "if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, )", "location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\"", "in protocols_dict \"\"\" # Those protocols cannot be used if they are not", "Those protocols cannot be used if they are not in MainProtocol forbidden_prots =", "If a protocol that should only be used with MainProtocol is present in", "syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing protocols. 
Args: apical_point_isec (int): section index", "location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location", "[step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"],", "recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol", "and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det )", "deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking,", "\"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions]", "holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from", ") logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec", "ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus", "forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No MainProtocol found, but {prot} was found.\"", "the 
extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any,", "not use this file except in compliance with the License. # You may", "stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name]", "def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from definition. Args: protocol_name (str): name", "stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from protocol definition to protocols dict. Args:", "stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition,", "the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses \"\"\" stim_definition", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol from definitions. 
Args: protocol_name (str): name of", "apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype,", "location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def", "in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the", "recordings prot_path (str): path to the protocols file features_path (str): path to the", "a protocol containing Vecstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"]", "agreed to in writing, software # distributed under the License is distributed on", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition,", "apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. Args: protocols_filename (str): path to the protocols file", "which to append the protocol protocol_name (str): name of the protocol protocol_definition (dict):", "protocols cannot be used if they are not in MainProtocol forbidden_prots = [", "\"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol to", "[] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = []", "etc. 
syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None)", "configuration data apical_point_isec (int): apical point section index. Should be given if the", "step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for", "step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0,", "seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def", "= protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus", "present in protocols_dict \"\"\" # Those protocols cannot be used if they are", "ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) \"\"\" if \"type\" in", "stochastic or deterministic prefix (str): prefix used in naming responses, features, recordings, etc.", "= protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\"", "check the class name of each protocol for prot in protocols_dict.values(): if type(prot).__name__", "protocol_definition, prefix, apical_point_isec ) # add protocol to protocol dict add_protocol( protocols_dict, protocol_name,", "threshold current \"\"\" ramp_definition = 
protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"],", "soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1: raise Exception(", "f\"To use {prot_name}, please set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1,", "step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings,", "etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[", "protocols to be run Raises: Exception: If a protocol that should only be", "step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def", "\"\"\"Read ramp protocol from definition. 
Args: protocol_name (str): name of the protocol protocol_definition", "class name of each protocol for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots:", "step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\"", "if there is \"somadistanceapic\" in \"type\" of at least one of the extra", "all protocols to be run If this function is called, should contain the", "import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\",", "protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepProtocol: Step Protocol \"\"\"", ") elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], )", "'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"],", "to in writing, software # distributed under the License is distributed on an", "features, recordings, etc. apical_point_isec (int): apical point section index Should be given if", "this protocol stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used", "implied. # See the License for the specific language governing permissions and #", "the recording definition \"type\" is \"somadistanceapic\" and apical_point_isec is -1. Exception: if the", "definitions. 
Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing the", "step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif \"type\" in protocol_definition and protocol_definition[\"type\"]", "step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim", "protocol definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing", "protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampThresholdProtocol:", "= { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name,", "stochkv_det=None, ): \"\"\"Return a dict containing protocols. 
Args: apical_point_isec (int): section index of", "step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition =", "soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\",", "in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings", ") ) if \"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location(", "location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus =", "stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the", "= protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if", "dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "stim_definition = 
protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim", "recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name]", "or deterministic Returns: StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"]", "{recording_definition['type']} not supported\") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from", "Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"],", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and", "NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5", "variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location =", "please set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define", "name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, 
syn_locs): \"\"\"Read", "is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the extra recording", "sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']} not supported\") return location def", "at least one of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] ==", "mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values()) return", "step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse(", "( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli,", "(list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) \"\"\" if", "found, but {prot} was found.\" f\"To use {prot_name}, please set MainProtocol.\" ) def", "syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"],", "protocol. 
Args: protocols_dict (dict): contains all protocols to be run If this function", "protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols,", "RampProtocol: Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"],", "name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read", "(dict): the dict to which to append the protocol protocol_name (str): name of", "no apical point is used in any extra recordings prot_path (str): path to", "= [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc,", "= get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location,", "protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition[\"var\"] recording = RecordingCustom(", "Netstim protocol from definitions. 
Args: protocol_name (str): name of the protocol protocol_definition (dict):", "protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol data", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol from definitions. Args: protocol_name (str): name", "Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol configuration", "step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read", "# check the class name of each protocol for prot in protocols_dict.values(): if", "should contain the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains", "protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec,", "return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from protocol definition. 
Args:", ") step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is", "\"RampThresholdProtocol\", ] # check the class name of each protocol for prot in", "( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name,", "location of the extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation(", "def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of the main protocol. Args: protocols_dict", "at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in", "(int): apical point section index. Should be given if the recording definition \"type\"", "deterministic Returns: StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if", "not set to 'python' nor to 'neuron' in config file.\" \"vecstim random will", "this protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse(", "of the protocol protocol_definition (dict): dict containing the protocol data prefix (str): prefix", "ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in", "total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if 
\"stochkv_det\" in", "on cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions,", "from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger =", "the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to", "sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"],", "# pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, )", "License. 
# You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0", "currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions =", "RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif \"type\" in protocol_definition", "point is used in any extra recordings prot_path (str): path to the protocols", ") var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return", "protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition", "re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"],", "used in naming responses, features, recordings, etc. apical_point_isec (int): apical point section index", "protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\":", "set if stochastic or deterministic prefix (str): prefix used in naming responses, features,", "recordings, stochkv_det=None ): \"\"\"Read step threshold protocol from definition. 
Args: protocol_name (str): name", "set to 'python' nor to 'neuron' in config file.\" \"vecstim random will be", "= efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\"", "random will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom(", "syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) \"\"\"", "\"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc,", "stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol from definitions. Args:", "to 'neuron' in config file.\" \"vecstim random will be re-set to 'python'.\" )", "(str): morphology name to be used as prefix in output filenames syn_locs (list):", "or deterministic prefix (str): prefix used in naming responses, features, recordings, etc. apical_point_isec", "# Copyright 2020-2021 Blue Brain Project / EPFL # Licensed under the Apache", "def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol from definition. 
Args:", "= recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings def", "protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix,", "\"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings):", "type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No MainProtocol found, but {prot}", "protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif \"type\"", "with this protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus =", "= read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and", "step threshold protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition", "protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = []", "\"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, )", "\"\"\"Read ramp threshold protocol from definition. 
Args: protocol_name (str): name of the protocol", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings,", "in the recording definition is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location", "\"type\" of at least one of the extra recording definition Returns: list of", "for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim)", "location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol(", "protocol. 
Args: protocols_dict (dict): contains all protocols to be run Raises: Exception: If", "in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] =", ") location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\":", "the protocol protocol_definition (dict): dict containing the protocol data prefix (str): prefix used", "stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings)", "name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1: raise", "seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\",", "del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not", "step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if", "protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol from definition. 
Args: protocol_name (str): name", "synapses (if any, else None) Returns: dict containing the protocols \"\"\" with open(protocols_filepath,", "StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features", "(list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim", "total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, )", "total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name,", "ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']} not supported\")", "definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "and apical_point_isec is -1. 
Exception: if the 'type' in the recording definition is", "data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampThresholdProtocol: Ramp Protocol", "step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus", "of each protocol for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name =", "protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition in", "else None) \"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] =", "Args: apical_point_isec (int): section index of the apical point Set to -1 no", "Exception: if the recording definition \"type\" is \"somadistanceapic\" and apical_point_isec is -1. Exception:", "protocol containing Vecstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not", "recordings ), prefix=prefix, ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name]", "See the License for the specific language governing permissions and # limitations under", "forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class name", "): \"\"\"Read step protocol from definition. 
Args: protocol_name (str): name of the protocol", "= ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepThresholdProtocol( name=protocol_name,", "recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict,", "activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"],", ") else: stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"],", "protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None,", "create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing protocols.", "None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name,", "\"\"\"Get recordings from protocol definition. 
Args: protocol_name (str): name of the protocol protocol_definition", "in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in", "License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to", "protocol containing Netstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom(", "add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys():", "\"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]:", "file.\" \"vecstim random will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim", "read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold protocol from definition. Args:", "(str): prefix used in naming responses, features, recordings, etc. apical_point_isec (int): apical point", "protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from protocol definition to protocols", "step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if stochkv_det is None: stochkv_det", "== \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix,", "main protocol. 
Args: protocols_dict (dict): contains all protocols to be run If this", "elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1: raise Exception( \"Cannot record at", "the efeatures prefix (str): prefix used in naming responses, features, recordings, etc. \"\"\"", "prot_path (str): path to the protocols file features_path (str): path to the features", "# You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 #", "): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition", "definition to protocols dict. Args: protocols_dict (dict): the dict to which to append", "of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus", "\"\"\"Read Netstim protocol from definitions. Args: protocol_name (str): name of the protocol protocol_definition", ") return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition,", ") else: raise Exception(f\"Recording type {recording_definition['type']} not supported\") return location def get_recordings(protocol_name, protocol_definition,", "recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ):", "or deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold currentd \"\"\" #", "apical point\" f\"if apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], 
sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]],", "in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No MainProtocol", "in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det", "index of the apical point Set to -1 no apical point is used", "= ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if", "the dict to which to append the protocol protocol_name (str): name of the", "if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else", "= read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"]", "of the extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"],", "to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"],", "the protocols file features_path (str): path to the features file mtype (str): morphology", "def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold protocol from definition.", "thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): 
\"\"\"Set", "be used if they are not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\",", "record at a given distance from apical point\" f\"if apical_point_isec is {apical_point_isec}.\" )", "protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec,", "json import logging from bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol,", "list of RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", )", "recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations", "efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature =", "Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals", "elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else:", "of the main protocol. Args: protocols_dict (dict): contains all protocols to be run", "KIND, either express or implied. 
# See the License for the specific language", "(bool): set if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's", "functions.\"\"\" # Copyright 2020-2021 Blue Brain Project / EPFL # Licensed under the", "containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs", "= define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else:", "[] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], )", "protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\"", "a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable", "location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det,", "\"\"\"Set the efeatures of the main protocol. 
Args: protocols_dict (dict): contains all protocols", "for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], )", "\"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\" in protocol_definition", "if stochastic or deterministic prefix (str): prefix used in naming responses, features, recordings,", "read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol from definition. Args: protocol_name", "efeatures prefix (str): prefix used in naming responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature", "threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions", ") other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name])", "set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols", "protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ):", "import json import logging from bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol,", "MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. Args:", "ANY KIND, either express or implied. 
# See the License for the specific", "def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the extra recording. Args: recording_definition (dict):", "None if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition", "of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses \"\"\"", "None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name,", "features file mtype (str): morphology name to be used as prefix in output", "step_definitions = [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse(", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "not in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim random not set to 'python'", "= ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det =", "def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol from definitions. 
Args: protocol_name (str):", "protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name", "\"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in", "{apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] ==", "Vecstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [", "point Set to -1 no apical point is used in any extra recordings", "= ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. Args:", "is present in protocols_dict \"\"\" # Those protocols cannot be used if they", "data apical_point_isec (int): apical point section index. Should be given if the recording", "protocol from definition. 
Args: protocol_name (str): name of the protocol protocol_definition (dict): contains", "logger.warning( \"vecstim random not set to 'python' nor to 'neuron' in config file.\"", "and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif", "configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampThresholdProtocol: Ramp", "the synapses (if any, else None) Returns: dict containing the protocols \"\"\" with", "Step Protocol depending on cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions =", "\"nrnseclistcomp\" Returns: location of the extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location", "-1. Exception: if the 'type' in the recording definition is neither \"somadistance\", nor", "features, recordings, etc. syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any,", "stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepProtocol(", "read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] ==", "location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec", "prefix=prefix, ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol(", "protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol from definitions. 
Args: protocol_name (str): name of", "protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\"", "and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif", "recordings, stochkv_det=None ): \"\"\"Read step protocol from definition. Args: protocol_name (str): name of", "pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures", "\"python\", \"neuron\", ]: logger.warning( \"vecstim random not set to 'python' nor to 'neuron'", "sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\":", "recordings, syn_locs): \"\"\"Read Vecstim protocol from definitions. Args: protocol_name (str): name of the", "apical_point_isec ) # add protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings,", "prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing protocols. 
Args: apical_point_isec", "= protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim random", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "protocols file stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used", "is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None )", "emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__)", "depending on cell's threshold current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"],", "recordings, etc. syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "all protocols to be run Raises: Exception: If a protocol that should only", "thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from definition.", ") elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] =", "contains all protocols to be run If this function is called, should contain", "applicable law or agreed to in writing, software # distributed under the License", "(dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this", "\"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class name of each protocol for prot", "= RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], 
rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict", "recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"]", "under the License. import json import logging from bluepyopt import ephys from emodelrunner.protocols", "): \"\"\"Define protocols. Args: protocols_filename (str): path to the protocols file stochkv_det (bool):", "threshold protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition (dict):", "protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[", "step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc,", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "\"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read", "of RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) )", "stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. 
Args: protocols_filename (str): path to the", "given if the recording definition \"type\" is \"somadistanceapic\" Raises: Exception: if the recording", "= read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and", "= [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols =", "config file.\" \"vecstim random will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\"", "= ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']} not", "writing, software # distributed under the License is distributed on an \"AS IS\"", "use with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold current", "= ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepProtocol( name=protocol_name,", "comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\",", "of the synapses (if any, else None) \"\"\" if \"type\" in protocol_definition and", "protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif (", "-1 no apical point is used in any extra recordings prot_path (str): path", "protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det )", "\"\"\"Add protocol from protocol definition to protocols dict. 
Args: protocols_dict (dict): the dict", "recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition[\"var\"] recording", "compliance with the License. # You may obtain a copy of the License", "recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol from definitions. Args: protocol_name", "to use with this protocol stochkv_det (bool): set if stochastic or deterministic prefix", "the protocol data prefix (str): prefix used in naming responses, features, recordings, etc.", "for the specific language governing permissions and # limitations under the License. import", "bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol,", "ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return", "deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold currentd \"\"\" # pylint:", "protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix (str): prefix used in", "= [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition:", "define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc =", "= logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 
) seclist_to_sec = {", "[stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the extra recording. Args:", "apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif", "(str): path to the protocols file features_path (str): path to the features file", ") set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values()) return ephys.protocols.SequenceProtocol(", "the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list", "protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"],", "\"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"],", "Protocol depending on cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"]", "(if any, else None) Returns: dict containing the protocols \"\"\" with open(protocols_filepath, \"r\",", ") protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported", "of ephys.locations.NrnPointProcessLocation): locations of the synapses 
Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus", "step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, )", "copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law", "protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ),", "(str): name of the protocol protocol_definition (dict): dict containing the protocol data prefix", "apical_point_isec == -1: raise Exception( \"Cannot record at a given distance from apical", "the class name of each protocol for prot in protocols_dict.values(): if type(prot).__name__ in", "RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ):", "(the \"License\"); # you may not use this file except in compliance with", "syn_locs): \"\"\"Read Vecstim protocol from definitions. Args: protocol_name (str): name of the protocol", "= get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol to protocol dict", "# Unless required by applicable law or agreed to in writing, software #", "[stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol from definitions. 
Args:", "name of each protocol for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name", "the extra recording definition Returns: list of RecordingCustom \"\"\" recordings = [] recordings.append(", "by applicable law or agreed to in writing, software # distributed under the", "protocols to be run If this function is called, should contain the MainProtocol", "= ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc,", "read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] ==", "): \"\"\"Read step threshold protocol from definition. Args: protocol_name (str): name of the", "protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim random not", "} def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol from definition. Args: protocol_name", "].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict", "protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, )", "file except in compliance with the License. 
# You may obtain a copy", "extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"],", "for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition[\"var\"]", "location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if stochkv_det is None: stochkv_det =", "# Those protocols cannot be used if they are not in MainProtocol forbidden_prots", "total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse(", "morphology name to be used as prefix in output filenames syn_locs (list): list", "for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols,", "in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings,", "stochkv_det=None ): \"\"\"Read step threshold protocol from definition. 
Args: protocol_name (str): name of", "mtype (str): morphology name to be used as prefix in output filenames syn_locs", "protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0,", "\"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol from", "at a given distance from apical point\" f\"if apical_point_isec is {apical_point_isec}.\" ) location", "Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"]", "the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix (str): prefix", "holding_stimulus = None if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\"", "etc. 
apical_point_isec (int): apical point section index Should be given if there is", "ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det = (", "protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif (", "emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim", "for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception(", "add protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs,", "syn_locs=None, ): \"\"\"Add protocol from protocol definition to protocols dict. Args: protocols_dict (dict):", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol(", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings,", "section index. 
Should be given if the recording definition \"type\" is \"somadistanceapic\" Raises:", "protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec ) var =", "the synapses (if any, else None) \"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"]", "set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values()) return ephys.protocols.SequenceProtocol( \"all", "protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det", "the 'type' in the recording definition is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\"", "[] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition: for", "\"type\" is \"somadistanceapic\" and apical_point_isec is -1. 
Exception: if the 'type' in the", ") elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1: raise Exception( \"Cannot record", "efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\",", "RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from", "recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None)", "protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold protocol from definition. Args: protocol_name", "thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from definition. Args:", "used in naming responses, features, recordings, etc. syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of", "use with this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepThresholdProtocol:", "cell's threshold current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc,", "features_path (str): path to the features file mtype (str): morphology name to be", "protocol from protocol definition to protocols dict. 
Args: protocols_dict (dict): the dict to", "StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features import", "protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings(", "= protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return", "protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol", "Exception(f\"Recording type {recording_definition['type']} not supported\") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get", "protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol from definition. 
Args: protocol_name (str):", "protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings )", "step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, )", "= RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif \"type\" in", "and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings", "protocols file features_path (str): path to the features file mtype (str): morphology name", "the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures", "section index of the apical point Set to -1 no apical point is", "contains the extra recording configuration data apical_point_isec (int): apical point section index. 
Should", "protocol for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise", "\"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name,", "with this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepThresholdProtocol: Step", "EPFL # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", ") return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol", "definition is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the extra", "Exception: if the 'type' in the recording definition is neither \"somadistance\", nor \"somadistanceapic\",", "responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ]", "== \"somadistanceapic\": if apical_point_isec == -1: raise Exception( \"Cannot record at a given", "protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold current \"\"\" ramp_definition =", "recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the extra recording. 
Args: recording_definition", "recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise", "seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']} not supported\") return location def get_recordings(protocol_name,", "(bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the", "the recording definition \"type\" is \"somadistanceapic\" Raises: Exception: if the recording definition \"type\"", "used if they are not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\",", "protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in", "RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition,", "any, else None) \"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name]", "protocol_definition, recordings): \"\"\"Read ramp threshold protocol from definition. 
Args: protocol_name (str): name of", "protocols_filename (str): path to the protocols file stochkv_det (bool): set if stochastic or", "check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of the main", "ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"]", "apical_point_isec=-1): \"\"\"Get recordings from protocol definition. Args: protocol_name (str): name of the protocol", "prefix, apical_point_isec=-1): \"\"\"Get recordings from protocol definition. Args: protocol_name (str): name of the", "\"stochkv_det\" in step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings,", "protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\"", "recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold", "if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec", "recording_definition, apical_point_isec ) var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, )", "of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) Returns: dict containing", "and protocol_definition[\"type\"] == 
\"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else:", "{prot_name}, please set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ):", "recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol from", "in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec ) var", "responses, features, recordings, etc. apical_point_isec (int): apical point section index Should be given", "the extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"],", "elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol(", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings", "StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None", "= protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = 
ephys.stimuli.NrnSquarePulse(", "= RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for protocol_name", "elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol(", "protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] =", "list of synapse locations stochkv_det (bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol:", "\"\"\" if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif", "given distance from apical point\" f\"if apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation(", "is used in any extra recordings prot_path (str): path to the protocols file", "f\"if apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, )", "SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol from definitions.", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "to use with this protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition = 
protocol_definition[\"stimuli\"][\"ramp\"]", "if the recording definition \"type\" is \"somadistanceapic\" and apical_point_isec is -1. Exception: if", "ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict", "containing Vecstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in", "= ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location =", "rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures,", "ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"],", "protocol_definition, recordings, syn_locs ) else: stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append(", "type {recording_definition['type']} not supported\") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings", "= ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], 
location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]:", "RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if", "this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol", "].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols(", "comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']} not supported\") return location", "in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs", "\"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\"", "protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values()) return ephys.protocols.SequenceProtocol( \"all protocols\", protocols=protocols, )", "stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc,", "Protocol \"\"\" ramp_definition = 
protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc,", "logging from bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol,", "\"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\" in", "step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus,", "containing the protocol data prefix (str): prefix used in naming responses, features, recordings,", "protocols_dict \"\"\" # Those protocols cannot be used if they are not in", "protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set", "the efeatures of the main protocol. 
Args: protocols_dict (dict): contains all protocols to", "ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom,", "(bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on", "(int): section index of the apical point Set to -1 no apical point", ") else: holding_stimulus = None if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"]", "the License for the specific language governing permissions and # limitations under the", "{prot} was found.\" f\"To use {prot_name}, please set MainProtocol.\" ) def define_protocols( protocols_filepath,", "with this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepProtocol: Step", "the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed", "ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation(", "recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from definition. 
Args: protocol_name", "Raises: Exception: if the recording definition \"type\" is \"somadistanceapic\" and apical_point_isec is -1.", "apical_point_isec (int): apical point section index Should be given if there is \"somadistanceapic\"", "to use with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold", ") return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the", "= efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ):", "get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the extra recording. Args: recording_definition (dict): contains", "stochkv_det, prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"],", "protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"],", "found.\" f\"To use {prot_name}, please set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\",", "synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses \"\"\" stim_definition =", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol,", "step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings )", "path to the features file mtype (str): morphology name to be used as", "used with MainProtocol is present in protocols_dict \"\"\" # Those protocols cannot be", "protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]:", "to protocols dict. Args: protocols_dict (dict): the dict to which to append the", "\"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items():", "location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings,", "in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols =", "locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses", "\"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, 
recordings, syn_locs ) else: stimuli = []", "None) Returns: dict containing the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file:", "was found.\" f\"To use {prot_name}, please set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None,", "index. Should be given if the recording definition \"type\" is \"somadistanceapic\" Raises: Exception:", "name of the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording):", "ThresholdDetection) efeatures (dict): contains the efeatures prefix (str): prefix used in naming responses,", "obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by", "name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]:", "syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition,", "def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. 
Args: protocols_filename (str):", "(bool): set if stochastic or deterministic prefix (str): prefix used in naming responses,", "(list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) Returns: dict", "\"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if", "\"vecstim random will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim =", "recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ):", "read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"]", "stochkv_det=None ): \"\"\"Read step protocol from definition. Args: protocol_name (str): name of the", "\"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, apical_point_isec )", "protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"]", "recordings, syn_locs ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] =", "if the 'type' in the recording definition is neither \"somadistance\", nor \"somadistanceapic\", nor", "efeatures of the main protocol. 
Args: protocols_dict (dict): contains all protocols to be", "= [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"],", "ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) Returns: dict containing the", "Version 2.0 (the \"License\"); # you may not use this file except in", "the License. import json import logging from bluepyopt import ephys from emodelrunner.protocols import", "read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"]", "configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set", "RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger", "= ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"],", "prefix used in naming responses, features, recordings, etc. 
syn_locs (list of ephys.locations.NrnPointProcessLocation): locations", "the recording definition is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of", "Project / EPFL # Licensed under the Apache License, Version 2.0 (the \"License\");", "(str): path to the features file mtype (str): morphology name to be used", "not supported\") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from protocol", "in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"],", "extra recording configuration data apical_point_isec (int): apical point section index. Should be given", "random not set to 'python' nor to 'neuron' in config file.\" \"vecstim random", "other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the", "definition \"type\" is \"somadistanceapic\" and apical_point_isec is -1. Exception: if the 'type' in", "read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from definition. 
Args: protocol_name (str): name of", "ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition =", "Exception: If a protocol that should only be used with MainProtocol is present", "Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"]", "Should be given if the recording definition \"type\" is \"somadistanceapic\" Raises: Exception: if", "\"somadistanceapic\" in \"type\" of at least one of the extra recordings syn_locs (list", "locations of the synapses (if any, else None) Returns: dict containing the protocols", "= define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict: efeatures", "the main protocol. Args: protocols_dict (dict): contains all protocols to be run If", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition,", "not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) #", "recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in", "location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from protocol definition. 
Args: protocol_name", "\"vecstim random not set to 'python' nor to 'neuron' in config file.\" \"vecstim", "recordings, syn_locs ) else: stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse(", "rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix):", "in \"type\" of at least one of the extra recordings syn_locs (list of", "Should be given if there is \"somadistanceapic\" in \"type\" of at least one", "apical_point_isec ) var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording)", "json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition", "filenames syn_locs (list): list of synapse locations stochkv_det (bool): set if stochastic or", "run Raises: Exception: If a protocol that should only be used with MainProtocol", "extra recordings prot_path (str): path to the protocols file features_path (str): path to", "protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if \"Main\"", "total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det", "at least one of the extra recording definition Returns: list of RecordingCustom \"\"\"", "): \"\"\"Return a dict containing protocols. 
Args: apical_point_isec (int): section index of the", "'type' in the recording definition is neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns:", "to use with this protocol stochkv_det (bool): set if stochastic or deterministic Returns:", "\"\"\" # Those protocols cannot be used if they are not in MainProtocol", "step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"],", "seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1: raise Exception( \"Cannot", "apical point is used in any extra recordings prot_path (str): path to the", "= \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name,", "is called, should contain the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures", "recordings from protocol definition. 
Args: protocol_name (str): name of the protocol protocol_definition (dict):", "protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean", "pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"]", "\"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp", "\"\"\"Read step threshold protocol from definition. Args: protocol_name (str): name of the protocol", "be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"],", "\"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs,", "nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the extra recording \"\"\" if recording_definition[\"type\"]", "stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepThresholdProtocol(", "use with this protocol stochkv_det (bool): set if stochastic or deterministic prefix (str):", "stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for", "protocol stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used in", "in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition, 
apical_point_isec ) var = recording_definition[\"var\"] recording =", ") # add protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det,", "== \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording", "logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec =", "get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var,", "one of the extra recording definition Returns: list of RecordingCustom \"\"\" recordings =", "efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ]", "protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampProtocol:", "if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold currentd", "soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"],", "protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): 
protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif", "OF ANY KIND, either express or implied. # See the License for the", "logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\":", "responses, features, recordings, etc. syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if", "(list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim", ") elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] =", ") recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None,", "Args: protocols_dict (dict): contains all protocols to be run Raises: Exception: If a", "= ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"],", "thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol from", "[ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class name of each", "else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, 
stochkv_det=stochkv_det, ) def", "is -1. Exception: if the 'type' in the recording definition is neither \"somadistance\",", "\"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in", "] # check the class name of each protocol for prot in protocols_dict.values():", "protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for", "\"neuron\", ]: logger.warning( \"vecstim random not set to 'python' nor to 'neuron' in", "locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses", "protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli", "(bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition", "= read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\" in protocol_definition and protocol_definition[\"type\"]", "\"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype)", "protocol that should only be used with MainProtocol is present in protocols_dict \"\"\"", "in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs", "( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = 
read_step_threshold_protocol( protocol_name,", "a dict containing protocols. Args: apical_point_isec (int): section index of the apical point", "name of the protocol protocol_definition (dict): dict containing the protocol data prefix (str):", "(str): name of the protocol protocol_definition (dict): dict containing the protocol data recordings", "protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols =", "protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]:", "= [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"],", "(dict): contains all protocols to be run Raises: Exception: If a protocol that", "holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol", "is \"somadistanceapic\" in \"type\" of at least one of the extra recordings syn_locs", "set MainProtocol.\" ) def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols.", "= NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def", "containing protocols. 
Args: apical_point_isec (int): section index of the apical point Set to", "disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = []", "with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\" in protocol_definitions:", "return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the extra", "this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepProtocol: Step Protocol", "from definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): contains the", "== -1: raise Exception( \"Cannot record at a given distance from apical point\"", "): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif", "{ \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition,", ") def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step protocol from definition.", "or agreed to in writing, software # distributed under the License is distributed", "synapses (if any, else None) \"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"] ==", "Args: protocols_dict (dict): contains all protocols to be run If this function is", "protocol containing all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols(", "License. 
import json import logging from bluepyopt import ephys from emodelrunner.protocols import (", "RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def", "prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. Args: protocols_filename (str): path to the protocols", "step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"]", "elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition,", "data prefix (str): prefix used in naming responses, features, recordings, etc. 
apical_point_isec (int):", "name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']} not supported\") return", "activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\",", "SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import", "= [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"],", "use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns:", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. 
Args: protocols_dict (dict): contains all protocols to be", "name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"],", "mtype) protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values()) return ephys.protocols.SequenceProtocol( \"all protocols\", protocols=protocols,", "\"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class name of each protocol for", "ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus =", "called, should contain the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict):", "index Should be given if there is \"somadistanceapic\" in \"type\" of at least", "return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add", "apical_point_isec (int): apical point section index. Should be given if the recording definition", "= type(prot).__name__ raise Exception( \"No MainProtocol found, but {prot} was found.\" f\"To use", "used in naming responses, features, recordings, etc. 
\"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature =", "'python' nor to 'neuron' in config file.\" \"vecstim random will be re-set to", "location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type {recording_definition['type']}", ") def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold protocol from", "[] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], )", "\"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold", "the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det", "protocol_name (str): name of the protocol protocol_definition (dict): contains the protocol configuration data", "recordings, etc. 
\"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current =", "this function is called, should contain the MainProtocol and the associated protocols (RinHoldCurrent,", "Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"],", "\"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\"", "= [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"],", "should only be used with MainProtocol is present in protocols_dict \"\"\" # Those", "stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name,", "used in any extra recordings prot_path (str): path to the protocols file features_path", "if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"],", "a protocol containing Netstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim 
=", "with MainProtocol is present in protocols_dict \"\"\" # Those protocols cannot be used", "protocol_definitions = json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for", "step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings", "License, Version 2.0 (the \"License\"); # you may not use this file except", "stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings,", "def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing", "prefix (str): prefix used in naming responses, features, recordings, etc. 
apical_point_isec (int): apical", "stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1):", "protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol.", "protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No MainProtocol found,", "location = get_extra_recording_location( recording_definition, apical_point_isec ) var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\",", ") ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for", "pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if", "definition \"type\" is \"somadistanceapic\" Raises: Exception: if the recording definition \"type\" is \"somadistanceapic\"", "= None if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in", "recordings, stochkv_det, prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\",", "if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for", "else: stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( 
step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"],", ") seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\": \"axon\", \"myelinated\": \"myelin\", }", "= None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition,", "(list): list of synapse locations stochkv_det (bool): set if stochastic or deterministic Returns:", "Protocol depending on cell's threshold current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse(", "\"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"],", "step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if stochkv_det is", "stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return", "of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if", "to be run If this function is called, should contain the MainProtocol and", "is \"somadistanceapic\" in \"type\" of at least one of the extra recording definition", "def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): 
\"\"\"Read Netstim protocol from definitions. Args: protocol_name (str):", "\"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols", "limitations under the License. import json import logging from bluepyopt import ephys from", ") def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. Args: protocols_dict (dict): contains all protocols", "containing the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file)", "= [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class name of", "stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. Args: protocols_dict (dict): contains", "get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol to protocol dict add_protocol(", ") from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import (", "dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim =", "recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. 
Args: protocols_dict (dict): contains all", "use with this protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepProtocol:", "\"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def", "mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing protocols. Args: apical_point_isec (int): section", "\"axonal\": \"axon\", \"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol", "this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a", "total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name,", "is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec, ) elif recording_definition[\"type\"]", "of at least one of the extra recording definition Returns: list of RecordingCustom", "ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step", "locations of the synapses (if any, else None) \"\"\" if \"type\" in protocol_definition", "\"somadistanceapic\" and apical_point_isec is -1. 
Exception: if the 'type' in the recording definition", "Set to -1 no apical point is used in any extra recordings prot_path", "NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name,", "or implied. # See the License for the specific language governing permissions and", "protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of the main protocol. Args:", "recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampProtocol: Ramp Protocol \"\"\"", "least one of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the", "elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol(", "ramp protocol from definition. 
Args: protocol_name (str): name of the protocol protocol_definition (dict):", "locations stochkv_det (bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing", "If this function is called, should contain the MainProtocol and the associated protocols", "( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import", "protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and", "return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings):", "syn_locs ) else: stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"],", "recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ):", "define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. 
Args: protocols_filename (str): path", "if \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition,", "protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name,", "\"\"\"Get the location for the extra recording. Args: recording_definition (dict): contains the extra", "stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim],", "ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol", "recordings): \"\"\"Read ramp protocol from definition. Args: protocol_name (str): name of the protocol", "for the extra recording. Args: recording_definition (dict): contains the extra recording configuration data", "(str): prefix used in naming responses, features, recordings, etc. 
syn_locs (list of ephys.locations.NrnPointProcessLocation):", "if \"stochkv_det\" in step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings,", ") elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name] =", "output filenames syn_locs (list): list of synapse locations stochkv_det (bool): set if stochastic", "step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"],", "any, else None) Returns: dict containing the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\")", "recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from protocol definition to protocols dict.", "] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None,", "depending on cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if", "of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Netstim stimulus activating synapses \"\"\"", "{} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings", "syn_locs=None, ): \"\"\"Define protocols. 
Args: protocols_filename (str): path to the protocols file stochkv_det", "total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check", "use this file except in compliance with the License. # You may obtain", "RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli", "holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus,", "prot_name = type(prot).__name__ raise Exception( \"No MainProtocol found, but {prot} was found.\" f\"To", "stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"],", "protocol definition to protocols dict. Args: protocols_dict (dict): the dict to which to", "protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\"", ") def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol from definitions. 
Args: protocol_name", "efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]]", "Exception( \"Cannot record at a given distance from apical point\" f\"if apical_point_isec is", "return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings,", "Ramp Protocol depending on cell's threshold current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus =", "nor to 'neuron' in config file.\" \"vecstim random will be re-set to 'python'.\"", "from definitions. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing", "else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol(", "= ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec ==", "stochastic or deterministic Returns: StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions =", "step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in", "ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], 
location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if stochkv_det", "\"\"\"Protocol-related functions.\"\"\" # Copyright 2020-2021 Blue Brain Project / EPFL # Licensed under", "= efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path,", "apical point section index Should be given if there is \"somadistanceapic\" in \"type\"", "in naming responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[", "cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict):", "in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"],", "read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol from definition. 
Args: protocol_name (str): name", "step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def", "protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs )", "holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name,", "protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"],", "MainProtocol found, but {prot} was found.\" f\"To use {prot_name}, please set MainProtocol.\" )", "NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 )", "stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold currentd \"\"\"", "data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation):", ") if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"],", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol to protocol", "\"type\" is \"somadistanceapic\" Raises: Exception: if the recording definition \"type\" is \"somadistanceapic\" and", 
"holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step", "protocol stochkv_det (bool): set if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending", "syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition,", "as protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict =", "RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures", "containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det", "\"somadistanceapic\" Raises: Exception: if the recording definition \"type\" is \"somadistanceapic\" and apical_point_isec is", "location for the extra recording. Args: recording_definition (dict): contains the extra recording configuration", "and # limitations under the License. import json import logging from bluepyopt import", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "): \"\"\"Add protocol from protocol definition to protocols dict. 
Args: protocols_dict (dict): the", "type(prot).__name__ raise Exception( \"No MainProtocol found, but {prot} was found.\" f\"To use {prot_name},", "extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else", "prefix in output filenames syn_locs (list): list of synapse locations stochkv_det (bool): set", "protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from protocol definition", "to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if", "(dict): contains the efeatures prefix (str): prefix used in naming responses, features, recordings,", "other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols", "file mtype (str): morphology name to be used as prefix in output filenames", "Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable", "else: raise Exception(f\"Recording type {recording_definition['type']} not supported\") return location def get_recordings(protocol_name, protocol_definition, prefix,", "in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add", "of at least one of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations", "naming responses, features, recordings, etc. 
apical_point_isec (int): apical point section index Should be", "too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\" in", "in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if", "recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol(", "\"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in", "'neuron' in config file.\" \"vecstim random will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"]", "if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\",", "RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for protocol_name in", "permissions and # limitations under the License. import json import logging from bluepyopt", ") step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"],", "with the License. 
# You may obtain a copy of the License at", "f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage", "name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings,", "def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol from definition. Args: protocol_name (str):", "Blue Brain Project / EPFL # Licensed under the Apache License, Version 2.0", "protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli = [] for", "run If this function is called, should contain the MainProtocol and the associated", "= NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings)", "step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], )", "= protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition", "recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, 
syn_locs): \"\"\"Read Vecstim protocol from definitions.", "law or agreed to in writing, software # distributed under the License is", "name to be used as prefix in output filenames syn_locs (list): list of", "prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"],", "recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending", "raise Exception( \"No MainProtocol found, but {prot} was found.\" f\"To use {prot_name}, please", "location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse(", "\"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus", "stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs):", "(dict): dict containing the protocol data prefix (str): prefix used in naming responses,", "in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items(): if", "variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix,", "distance from apical point\" f\"if 
apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"],", "\"type\" of at least one of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation):", "step protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition (dict):", "in compliance with the License. # You may obtain a copy of the", "apical_point_isec=-1): \"\"\"Get the location for the extra recording. Args: recording_definition (dict): contains the", "to be used as prefix in output filenames syn_locs (list): list of synapse", "the protocol protocol_name (str): name of the protocol protocol_definition (dict): dict containing the", "ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return RampProtocol(", "StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs):", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "in step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, )", "to append the protocol protocol_name (str): name of the protocol protocol_definition (dict): dict", "from bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol,", "Returns: StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions = 
protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions,", "return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim protocol from", "from protocol definition. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict", "step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus,", "\"\"\"Check for unsupported protocol. Args: protocols_dict (dict): contains all protocols to be run", "def add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from", "protocols. Args: apical_point_isec (int): section index of the apical point Set to -1", "be used with MainProtocol is present in protocols_dict \"\"\" # Those protocols cannot", "location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if", "definition Returns: list of RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc,", "protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol to protocol dict add_protocol( protocols_dict,", "and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix (str):", "import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc", "and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, 
protocol_definition, recordings, syn_locs ) elif", "stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location", "set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of the main protocol. Args: protocols_dict (dict):", "MainProtocol is present in protocols_dict \"\"\" # Those protocols cannot be used if", "[\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition, prefix, apical_point_isec ) # add protocol", "if \"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures,", "\"Cannot record at a given distance from apical point\" f\"if apical_point_isec is {apical_point_isec}.\"", "holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in", "\"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict,", "2020-2021 Blue Brain Project / EPFL # Licensed under the Apache License, Version", "recordings to use with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's", "f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\",", "to use with this protocol syn_locs (list of 
ephys.locations.NrnPointProcessLocation): locations of the synapses", "name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\", \"axonal\":", "disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\"", "None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings,", "append the protocol protocol_name (str): name of the protocol protocol_definition (dict): dict containing", "]: logger.warning( \"vecstim random not set to 'python' nor to 'neuron' in config", "current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], )", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\", step_protocol_template=read_step_protocol(", "protocol data prefix (str): prefix used in naming responses, features, recordings, etc. 
apical_point_isec", "contains the efeatures prefix (str): prefix used in naming responses, features, recordings, etc.", "protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with", "protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\"", "step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"]", "may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required", ") def define_protocols( protocols_filepath, stochkv_det=None, prefix=\"\", apical_point_isec=-1, syn_locs=None, ): \"\"\"Define protocols. Args: protocols_filename", "protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition", "sec_index=apical_point_isec, ) elif recording_definition[\"type\"] == \"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"],", "recordings, syn_locs): \"\"\"Read Netstim protocol from definitions. Args: protocol_name (str): name of the", "Brain Project / EPFL # Licensed under the Apache License, Version 2.0 (the", "for unsupported protocol. 
Args: protocols_dict (dict): contains all protocols to be run Raises:", "if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols \"\"\"", "\"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the extra recording \"\"\" if recording_definition[\"type\"] ==", "(str): path to the protocols file stochkv_det (bool): set if stochastic or deterministic", "else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol(", "features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current", "protocols_dict = define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict:", ") def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp protocol from definition. Args: protocol_name (str):", "or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols \"\"\" # pylint:", "in naming responses, features, recordings, etc. 
syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the", "holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None: stochkv_det", "== \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\":", "containing Netstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs,", "\"myelinated\": \"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol from definition.", "with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom:", "Raises: Exception: If a protocol that should only be used with MainProtocol is", ") if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition", "with this protocol stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix", "ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition", "protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\"", "distributed under the License is 
distributed on an \"AS IS\" BASIS, # WITHOUT", "from apical point\" f\"if apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"],", "data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampProtocol: Ramp Protocol", "of synapse locations stochkv_det (bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence", "synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses \"\"\" stim_definition =", "is \"somadistanceapic\" and apical_point_isec is -1. Exception: if the 'type' in the recording", "in naming responses, features, recordings, etc. apical_point_isec (int): apical point section index Should", "ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus =", ") if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol( \"RinHoldCurrent\", rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix,", "protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {}", "RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition: for recording_definition in", "specific language governing permissions and # limitations under the License. 
import json import", "supported\") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from protocol definition.", "holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol", "dict. Args: protocols_dict (dict): the dict to which to append the protocol protocol_name", "emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if", "prefix used in naming responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature", "return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of the main protocol.", "(dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this", "be given if the recording definition \"type\" is \"somadistanceapic\" Raises: Exception: if the", "this file except in compliance with the License. 
# You may obtain a", "cannot be used if they are not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\",", "= ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]:", "protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from protocol definition to", "for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if", "(str): prefix used in naming responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"]", "\"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class name of each protocol", "MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix", "protocol_definition, recordings): \"\"\"Read ramp protocol from definition. Args: protocol_name (str): name of the", "prefix): \"\"\"Set the efeatures of the main protocol. 
Args: protocols_dict (dict): contains all", "# http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing,", "= [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] =", "= read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli = [] for stimulus_definition", "): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in", "protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus =", "ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], )", "protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] ==", "name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. Args: protocols_dict (dict):", "recording. Args: recording_definition (dict): contains the extra recording configuration data apical_point_isec (int): apical", "efeatures, prefix): \"\"\"Set the efeatures of the main protocol. Args: protocols_dict (dict): contains", "governing permissions and # limitations under the License. 
import json import logging from", "protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ):", "recording definition Returns: list of RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\",", "to the protocols file features_path (str): path to the features file mtype (str):", "file stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used in", "import logging from bluepyopt import ephys from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol,", "of the protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings", "syn_locs ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol(", "in protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name, protocol_definition,", "protocol protocol_definition (dict): dict containing the protocol data prefix (str): prefix used in", "recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] ==", "protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else: check_for_forbidden_protocol(protocols_dict) return", "protocol_definition in protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]: recordings = get_recordings( protocol_name,", "point section 
index Should be given if there is \"somadistanceapic\" in \"type\" of", "stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol containing all the protocols \"\"\" #", "stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read", "recording configuration data apical_point_isec (int): apical point section index. Should be given if", "as prefix in output filenames syn_locs (list): list of synapse locations stochkv_det (bool):", "# pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli", "the apical point Set to -1 no apical point is used in any", "but {prot} was found.\" f\"To use {prot_name}, please set MainProtocol.\" ) def define_protocols(", "containing all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path,", "of the synapses (if any, else None) Returns: dict containing the protocols \"\"\"", "recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, variable=\"v\", ) ) if \"extra_recordings\" in protocol_definition: for recording_definition", "\"somadistanceapic\" in \"type\" of at least one of the extra recording definition Returns:", "\"nrnseclistcomp\": location = ephys.locations.NrnSeclistCompLocation( name=recording_definition[\"name\"], comp_x=recording_definition[\"comp_x\"], sec_index=recording_definition[\"sec_index\"], seclist_name=recording_definition[\"seclist_name\"], ) else: raise Exception(f\"Recording type", "extra recording definition Returns: list of RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom(", "get_recordings(protocol_name, protocol_definition, prefix, 
apical_point_isec=-1): \"\"\"Get recordings from protocol definition. Args: protocol_name (str): name", "contain the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the", "deterministic prefix (str): prefix used in naming responses, features, recordings, etc. syn_locs (list", ") if \"extra_recordings\" in protocol_definition: for recording_definition in protocol_definition[\"extra_recordings\"]: location = get_extra_recording_location( recording_definition,", "protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if \"Main\" in protocol_definitions.keys(): protocols_dict[\"RinHoldcurrent\"] = RatSSCxRinHoldcurrentProtocol(", "is \"somadistanceapic\" Raises: Exception: if the recording definition \"type\" is \"somadistanceapic\" and apical_point_isec", "from emodelrunner.protocols import ( RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, )", "if stochastic or deterministic Returns: StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions", "== \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\"", "Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions =", "= RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings def add_protocol( protocols_dict, protocol_name,", "required by applicable law or agreed to in writing, software # distributed under", "protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ 
f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude", "protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\":", "the location for the extra recording. Args: recording_definition (dict): contains the extra recording", "syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) Returns:", "encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict", "deterministic prefix (str): prefix used in naming responses, features, recordings, etc. apical_point_isec (int):", "features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values())", "stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get", "), prefix=prefix, ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] =", "total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if stochkv_det is None: stochkv_det = (", ") return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition,", "] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude 
protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage =", "if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No MainProtocol found, but", "apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing protocols. Args:", "var = recording_definition[\"var\"] recording = RecordingCustom( name=f\"{prefix}.{protocol_name}.{location.name}.{var}\", location=location, variable=var, ) recordings.append(recording) return recordings", "(str): name of the protocol protocol_definition (dict): contains the protocol configuration data recordings", "stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name]", "the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool):", "protocol_definition, recordings, syn_locs ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name]", "recordings to use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the", "== \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\"", "prefix, syn_locs=None, ): \"\"\"Add protocol from protocol definition to protocols dict. Args: protocols_dict", "= {} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in [\"Main\", \"RinHoldcurrent\"]:", "\"myelin\", } def read_ramp_threshold_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read ramp threshold protocol from definition. Args:", "language governing permissions and # limitations under the License. 
import json import logging", "in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings", ") if \"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict,", "ramp threshold protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition", "holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else:", "step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim)", "(dict): contains all protocols to be run If this function is called, should", "a given distance from apical point\" f\"if apical_point_isec is {apical_point_isec}.\" ) location =", "Args: protocols_filename (str): path to the protocols file stochkv_det (bool): set if stochastic", "to 'python' nor to 'neuron' in config file.\" \"vecstim random will be re-set", "f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a", "MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check the class", "\"stochkv_det\" in step_definition else None ) return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, 
holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det,", "protocols. Args: protocols_filename (str): path to the protocols file stochkv_det (bool): set if", "all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict = define_protocols( prot_path, stochkv_det,", "not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ] # check", "protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with", "StepProtocol: Step Protocol \"\"\" # pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict):", "stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\" ): protocols_dict[protocol_name]", "associated protocols (RinHoldCurrent, ThresholdDetection) efeatures (dict): contains the efeatures prefix (str): prefix used", "in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, )", "# you may not use this file except in compliance with the License.", "will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs,", "dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs, ) if \"Main\" in", "Returns: list of RecordingCustom \"\"\" recordings = [] recordings.append( RecordingCustom( name=f\"{prefix}.{protocol_name}.soma.v\", location=soma_loc, 
variable=\"v\",", "protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold protocol from definition. Args: protocol_name (str):", "[] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol(", "the extra recording. Args: recording_definition (dict): contains the extra recording configuration data apical_point_isec", "protocol from definitions. Args: protocol_name (str): name of the protocol protocol_definition (dict): dict", "apical_point_isec (int): section index of the apical point Set to -1 no apical", "protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\" in protocol_definition and", "NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def", "configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns: RampProtocol: Ramp", "protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif (", "elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition,", "if they are not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\",", "emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = 
ephys.locations.NrnSeclistCompLocation( name=\"soma\",", "return StepProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings,", "the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol Returns:", "dict containing protocols. Args: apical_point_isec (int): section index of the apical point Set", "recordings): \"\"\"Read ramp threshold protocol from definition. Args: protocol_name (str): name of the", "recording \"\"\" if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], )", "synapse locations stochkv_det (bool): set if stochastic or deterministic Returns: ephys.protocols.SequenceProtocol: sequence protocol", "efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ].exp_mean def create_protocols( apical_point_isec, prot_path, features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return", "other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name in protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name])", "protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool):", "for stimulus_definition in protocol_definition[\"stimuli\"]: stimuli.append( ephys.stimuli.NrnSquarePulse( step_amplitude=stimulus_definition[\"amp\"], step_delay=stimulus_definition[\"delay\"], step_duration=stimulus_definition[\"duration\"], location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) )", "if apical_point_isec == -1: 
raise Exception( \"Cannot record at a given distance from", "\"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings,", "License for the specific language governing permissions and # limitations under the License.", "( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] = RatSSCxThresholdDetectionProtocol( \"IDRest\",", ") elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name,", "in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path, mtype, ) set_main_protocol_efeatures(protocols_dict, efeatures, mtype) protocols", "step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None if stochkv_det is None:", "(dict): contains the extra recording configuration data apical_point_isec (int): apical point section index.", "only be used with MainProtocol is present in protocols_dict \"\"\" # Those protocols", "ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], )", "recording_definition (dict): contains the extra recording configuration data apical_point_isec (int): apical point section", "\"License\"); # you may not use this file except in compliance with the", "syn_locs (list): list of synapse locations stochkv_det (bool): set if stochastic or deterministic", "step_stimuli.append(step_stim) 
holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) if stochkv_det is None:", "naming responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature = efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\"", "each protocol for prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__", "from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation(", "set if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending on cell's threshold", "holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None", "to the protocols file stochkv_det (bool): set if stochastic or deterministic prefix (str):", "sequence protocol containing all the protocols \"\"\" # pylint: disable=unbalanced-tuple-unpacking, too-many-locals protocols_dict =", "\"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif ( \"type\"", "\"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim],", "in writing, software # distributed under the License is distributed on an \"AS", "efeatures (dict): contains the efeatures prefix (str): prefix used in naming responses, 
features,", "protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in protocol_definitions[\"Main\"]: for protocol_name", "# add protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix,", "mtype, apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict: efeatures = define_efeatures( protocols_dict[\"Main\"], features_path,", "protocols_dict (dict): contains all protocols to be run If this function is called,", ") return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det, ) def read_vecstim_protocol(protocol_name, protocol_definition,", "contains all protocols to be run Raises: Exception: If a protocol that should", "extra recording. Args: recording_definition (dict): contains the extra recording configuration data apical_point_isec (int):", "dict to which to append the protocol protocol_name (str): name of the protocol", "protocol protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol", "in forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No MainProtocol found, but {prot} was", "in step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"], recordings=recordings, stochkv_det=stochkv_det,", "protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs )", "(bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of", "from protocol definition to protocols dict. 
Args: protocols_dict (dict): the dict to which", "Returns: dict containing the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions", "on cell's threshold current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"],", "== \"StepThresholdProtocol\" ): protocols_dict[protocol_name] = read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif (", "with this protocol Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold current \"\"\"", "== \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli =", "SweepProtocolCustom(protocol_name, [stim], recordings) def get_extra_recording_location(recording_definition, apical_point_isec=-1): \"\"\"Get the location for the extra recording.", "protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature = efeatures[ f\"{prefix}.Rin.soma.v.voltage_base\" ] protocols_dict[\"ThresholdDetection\"].holding_voltage = efeatures[", "of the apical point Set to -1 no apical point is used in", "apical point section index. Should be given if the recording definition \"type\" is", "(bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set if stochastic or", "return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None", "to -1 no apical point is used in any extra recordings prot_path (str):", "# limitations under the License. 
import json import logging from bluepyopt import ephys", "protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus = ephys.stimuli.NrnRampPulse( ramp_amplitude_start=ramp_definition[\"ramp_amplitude_start\"], ramp_amplitude_end=ramp_definition[\"ramp_amplitude_end\"], ramp_delay=ramp_definition[\"ramp_delay\"], ramp_duration=ramp_definition[\"ramp_duration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) if \"holding\"", "the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating synapses \"\"\" stim_definition", "nor \"nrnseclistcomp\" Returns: location of the extra recording \"\"\" if recording_definition[\"type\"] == \"somadistance\":", "of the protocol protocol_definition (dict): contains the protocol configuration data recordings (bluepyopt.ephys.recordings.CompRecording): recordings", "section index Should be given if there is \"somadistanceapic\" in \"type\" of at", "file features_path (str): path to the features file mtype (str): morphology name to", "2.0 (the \"License\"); # you may not use this file except in compliance", "protocol_definition, recordings, stochkv_det ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepThresholdProtocol\"", "[ \"python\", \"neuron\", ]: logger.warning( \"vecstim random not set to 'python' nor to", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the", "in output filenames syn_locs (list): list of synapse locations stochkv_det (bool): set if", "if the recording definition \"type\" is \"somadistanceapic\" Raises: Exception: if the recording definition", "\"No MainProtocol found, but {prot} was found.\" f\"To use {prot_name}, please set MainProtocol.\"", "recordings to use with this protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition =", "else None) Returns: dict containing the protocols \"\"\" with open(protocols_filepath, \"r\", encoding=\"utf-8\") as", "def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. Args: protocols_dict (dict): contains all protocols to", "Args: protocols_dict (dict): the dict to which to append the protocol protocol_name (str):", "def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from protocol definition. Args: protocol_name (str):", "express or implied. # See the License for the specific language governing permissions", "recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1: raise Exception( \"Cannot record at a", "efeatures, mtype) protocols = [protocols_dict[\"Main\"]] else: protocols = list(protocols_dict.values()) return ephys.protocols.SequenceProtocol( \"all protocols\",", "either express or implied. # See the License for the specific language governing", ") holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus,", "in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim random not set to 'python' nor", "recording definition \"type\" is \"somadistanceapic\" and apical_point_isec is -1. 
Exception: if the 'type'", "the protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to", "protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol syn_locs (list of", "\"\"\"Return a dict containing protocols. Args: apical_point_isec (int): section index of the apical", "Args: protocol_name (str): name of the protocol protocol_definition (dict): dict containing the protocol", "step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus,", "protocols_dict (dict): contains all protocols to be run Raises: Exception: If a protocol", "prot in protocols_dict.values(): if type(prot).__name__ in forbidden_prots: prot_name = type(prot).__name__ raise Exception( \"No", "least one of the extra recording definition Returns: list of RecordingCustom \"\"\" recordings", "apical_point_isec is -1. Exception: if the 'type' in the recording definition is neither", "ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\", \"apical\": \"apic\",", "unsupported protocol. Args: protocols_dict (dict): contains all protocols to be run Raises: Exception:", "the License. 
# You may obtain a copy of the License at #", "step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], ) else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus,", "Netstim stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"],", "if \"stochkv_det\" in step_definition else None ) return StepThresholdProtocol( name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, thresh_perc=step_definition[\"thresh_perc\"],", "holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name", "You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless", "isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli = [] for step_definition in step_definitions: step_stim", "): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition", "apical point Set to -1 no apical point is used in any extra", "synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]:", "in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus =", ") elif \"type\" in protocol_definition and protocol_definition[\"type\"] == \"Netstim\": protocols_dict[protocol_name] = read_netstim_protocol( 
protocol_name,", "recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use with this protocol stochkv_det (bool): set if stochastic", "protocols_dict (dict): the dict to which to append the protocol protocol_name (str): name", "step_stimuli = [] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"],", "(if any, else None) \"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\":", "recordings to use with this protocol stochkv_det (bool): set if stochastic or deterministic", "stim_definition[\"vecstim_random\"], ) return SweepProtocolCustom(protocol_name, [stim], recordings) def read_netstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Netstim", "\"\"\"Read step protocol from definition. Args: protocol_name (str): name of the protocol protocol_definition", "ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition", "raise Exception(f\"Recording type {recording_definition['type']} not supported\") return location def get_recordings(protocol_name, protocol_definition, prefix, apical_point_isec=-1):", "they are not in MainProtocol forbidden_prots = [ \"RatSSCxRinHoldcurrentProtocol\", \"RatSSCxThresholdDetectionProtocol\", \"StepThresholdProtocol\", \"RampThresholdProtocol\", ]", "protocols_dict = {} for protocol_name, protocol_definition in protocol_definitions.items(): if protocol_name not in [\"Main\",", "ephys.stimuli.NrnSquarePulse( step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, 
total_duration=step_definition[\"totduration\"], ) step_stimuli.append(step_stim) holding_stimulus = ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=step_definition[\"totduration\"], location=soma_loc,", "prefix, apical_point_isec ) # add protocol to protocol dict add_protocol( protocols_dict, protocol_name, protocol_definition,", "add_protocol( protocols_dict, protocol_name, protocol_definition, recordings, stochkv_det, prefix, syn_locs=None, ): \"\"\"Add protocol from protocol", "\"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) elif \"type\" in protocol_definition and protocol_definition[\"type\"] ==", "location=soma_loc, total_duration=stimulus_definition[\"totduration\"], ) ) protocols_dict[protocol_name] = ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict):", ") else: holding_stimulus = None return RampProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, recordings=recordings, ) def", "recording definition \"type\" is \"somadistanceapic\" Raises: Exception: if the recording definition \"type\" is", "of ephys.locations.NrnPointProcessLocation): locations of the synapses (if any, else None) \"\"\" if \"type\"", "point\" f\"if apical_point_isec is {apical_point_isec}.\" ) location = ephys.locations.NrnSecSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], sec_name=seclist_to_sec[recording_definition[\"seclist_name\"]], sec_index=apical_point_isec,", "stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]: logger.warning( \"vecstim random not set to", "one of the extra recordings syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses", "\"IDRest\", step_protocol_template=read_step_protocol( \"Threshold\", protocol_definition[\"step_template\"], recordings ), prefix=prefix, ) 
elif \"type\" in protocol_definition and", "naming responses, features, recordings, etc. syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses", "Args: recording_definition (dict): contains the extra recording configuration data apical_point_isec (int): apical point", "neither \"somadistance\", nor \"somadistanceapic\", nor \"nrnseclistcomp\" Returns: location of the extra recording \"\"\"", "stochkv_det (bool): set if stochastic or deterministic Returns: StepThresholdProtocol: Step Protocol depending on", "http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software", "== \"Vecstim\": protocols_dict[protocol_name] = read_vecstim_protocol( protocol_name, protocol_definition, recordings, syn_locs ) elif \"type\" in", "= read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\" in protocol_definition and protocol_definition[\"type\"]", "and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol( protocol_name, protocol_definition, recordings, stochkv_det ) elif", "(bool): set if stochastic or deterministic Returns: StepProtocol: Step Protocol \"\"\" # pylint:", "open(protocols_filepath, \"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = json.load(protocol_file) if \"__comment\" in protocol_definitions: del", "protocol_definition (dict): dict containing the protocol data prefix (str): prefix used in naming", ") elif ( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RatSSCxThresholdDetectionProtocol\" ): protocols_dict[\"ThresholdDetection\"] =", "if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name, protocol_definition in", "of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or", "protocol_definition, prefix, apical_point_isec=-1): \"\"\"Get recordings from 
protocol definition. Args: protocol_name (str): name of", "stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"],", "to which to append the protocol protocol_name (str): name of the protocol protocol_definition", "else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of the", "protocol protocol_definition (dict): dict containing the protocol data recordings (bluepyopt.ephys.recordings.CompRecording): recordings to use", "prefix used in naming responses, features, recordings, etc. apical_point_isec (int): apical point section", "path to the protocols file features_path (str): path to the features file mtype", "= ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0, comp_x=0.5 ) seclist_to_sec = { \"somatic\": \"soma\", \"apical\":", "ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing Vecstim stimulus activating", "ephys.stimuli.NrnSquarePulse( step_delay=0.0, step_duration=ramp_definition[\"totduration\"], location=soma_loc, total_duration=ramp_definition[\"totduration\"], ) return RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"],", "in \"type\" of at least one of the extra recording definition Returns: list", "== \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name, protocol_definition, recordings ) elif ( \"type\"", "function is called, should contain the MainProtocol and the associated protocols (RinHoldCurrent, ThresholdDetection)", "prefix (str): prefix used in naming responses, 
features, recordings, etc. syn_locs (list of", "features_path=\"\", mtype=\"\", syn_locs=None, stochkv_det=None, ): \"\"\"Return a dict containing protocols. Args: apical_point_isec (int):", "in config file.\" \"vecstim random will be re-set to 'python'.\" ) stim_definition[\"vecstim_random\"] =", "in protocol_definitions[\"Main\"][\"other_protocols\"]: if protocol_name in protocols_dict: other_protocols.append(protocols_dict[protocol_name]) pre_protocols = [] if \"pre_protocols\" in", "None: stochkv_det = ( step_definition[\"stochkv_det\"] if \"stochkv_det\" in step_definition else None ) return", "except in compliance with the License. # You may obtain a copy of", "prefix (str): prefix used in naming responses, features, recordings, etc. \"\"\" protocols_dict[\"Main\"].rmp_efeature =", "emodelrunner.recordings import RecordingCustom from emodelrunner.features import define_efeatures from emodelrunner.synapses.stimuli import ( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom,", "point section index. Should be given if the recording definition \"type\" is \"somadistanceapic\"", "protocols dict. 
Args: protocols_dict (dict): the dict to which to append the protocol", "total_duration=ramp_definition[\"totduration\"], ) if \"holding\" in protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"],", "None) \"\"\" if \"type\" in protocol_definition and protocol_definition[\"type\"] == \"StepProtocol\": protocols_dict[protocol_name] = read_step_protocol(", "( \"type\" in protocol_definition and protocol_definition[\"type\"] == \"RampProtocol\" ): protocols_dict[protocol_name] = read_ramp_protocol( protocol_name,", "else: holding_stimulus = None if stochkv_det is None: stochkv_det = ( step_definition[\"stochkv_det\"] if", "protocol_name, protocol_definition, recordings, syn_locs ) else: stimuli = [] for stimulus_definition in protocol_definition[\"stimuli\"]:", "the protocols file stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix", "= json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"] protocols_dict = {} for protocol_name,", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "define_protocols( prot_path, stochkv_det, mtype, apical_point_isec, syn_locs, ) if \"Main\" in protocols_dict: efeatures =", ") else: check_for_forbidden_protocol(protocols_dict) return protocols_dict def set_main_protocol_efeatures(protocols_dict, efeatures, prefix): \"\"\"Set the efeatures of", "recordings, etc. apical_point_isec (int): apical point section index Should be given if there", "ephys.protocols.SweepProtocol( name=protocol_name, stimuli=stimuli, recordings=recordings ) def check_for_forbidden_protocol(protocols_dict): \"\"\"Check for unsupported protocol. 
Args: protocols_dict", "use with this protocol Returns: RampProtocol: Ramp Protocol \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"] ramp_stimulus", "StepThresholdProtocol: Step Protocol depending on cell's threshold currentd \"\"\" # pylint: disable=undefined-loop-variable step_definitions", "Returns: RampThresholdProtocol: Ramp Protocol depending on cell's threshold current \"\"\" ramp_definition = protocol_definition[\"stimuli\"][\"ramp\"]", "RampThresholdProtocol( name=protocol_name, ramp_stimulus=ramp_stimulus, holding_stimulus=holding_stimulus, thresh_perc_start=ramp_definition[\"thresh_perc_start\"], thresh_perc_end=ramp_definition[\"thresh_perc_end\"], recordings=recordings, ) def read_ramp_protocol(protocol_name, protocol_definition, recordings): \"\"\"Read", "protocol_definition[\"stimuli\"]: holding_definition = protocol_definition[\"stimuli\"][\"holding\"] holding_stimulus = ephys.stimuli.NrnSquarePulse( step_amplitude=holding_definition[\"amp\"], step_delay=holding_definition[\"delay\"], step_duration=holding_definition[\"duration\"], location=soma_loc, total_duration=holding_definition[\"totduration\"], )", "ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"] == \"somadistanceapic\": if apical_point_isec == -1:", "in any extra recordings prot_path (str): path to the protocols file features_path (str):", "the features file mtype (str): morphology name to be used as prefix in", "rin_protocol_template=protocols_dict[\"Rin\"], holdi_precision=protocol_definitions[\"RinHoldcurrent\"][\"holdi_precision\"], holdi_max_depth=protocol_definitions[\"RinHoldcurrent\"][\"holdi_max_depth\"], prefix=prefix, ) other_protocols = [] for protocol_name in protocol_definitions[\"Main\"][\"other_protocols\"]: if", "\"r\", encoding=\"utf-8\") as protocol_file: protocol_definitions = 
json.load(protocol_file) if \"__comment\" in protocol_definitions: del protocol_definitions[\"__comment\"]", ") stim_definition[\"vecstim_random\"] = \"python\" stim = NrnVecStimStimulusCustom( syn_locs, stim_definition[\"syn_start\"], stim_definition[\"syn_stop\"], stim_definition[\"syn_stim_seed\"], stim_definition[\"vecstim_random\"], )", "stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ): \"\"\"Read step threshold protocol", "the specific language governing permissions and # limitations under the License. import json", "[] for step_definition in step_definitions: step_stim = ephys.stimuli.NrnSquarePulse( step_amplitude=step_definition[\"amp\"], step_delay=step_definition[\"delay\"], step_duration=step_definition[\"duration\"], location=soma_loc, total_duration=step_definition[\"totduration\"],", "there is \"somadistanceapic\" in \"type\" of at least one of the extra recording", "name=protocol_name, step_stimuli=step_stimuli, holding_stimulus=holding_stimulus, recordings=recordings, stochkv_det=stochkv_det, ) def read_step_threshold_protocol( protocol_name, protocol_definition, recordings, stochkv_det=None ):", "if recording_definition[\"type\"] == \"somadistance\": location = ephys.locations.NrnSomaDistanceCompLocation( name=recording_definition[\"name\"], soma_distance=recording_definition[\"somadistance\"], seclist_name=recording_definition[\"seclist_name\"], ) elif recording_definition[\"type\"]", "\"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\", \"neuron\", ]: logger.warning(", "protocol_definition and protocol_definition[\"type\"] == \"RampThresholdProtocol\" ): protocols_dict[protocol_name] = read_ramp_threshold_protocol( protocol_name, protocol_definition, recordings )", "there is \"somadistanceapic\" in \"type\" of at least one of the extra recordings", "the extra recording configuration data 
apical_point_isec (int): apical point section index. Should be", "read_vecstim_protocol(protocol_name, protocol_definition, recordings, syn_locs): \"\"\"Read Vecstim protocol from definitions. Args: protocol_name (str): name", "RampProtocol, RampThresholdProtocol, StepProtocol, StepThresholdProtocol, RatSSCxThresholdDetectionProtocol, RatSSCxRinHoldcurrentProtocol, RatSSCxMainProtocol, SweepProtocolCustom, ) from emodelrunner.recordings import RecordingCustom", "be run If this function is called, should contain the MainProtocol and the", "given if there is \"somadistanceapic\" in \"type\" of at least one of the", "= efeatures[f\"{prefix}.RMP.soma.v.voltage_base\"] protocols_dict[\"Main\"].rin_efeature = efeatures[ f\"{prefix}.Rin.soma.v.ohmic_input_resistance_vb_ssse\" ] protocols_dict[\"Main\"].rin_efeature.stimulus_current = protocols_dict[ \"Main\" ].rinhold_protocol.rin_protocol_template.step_amplitude protocols_dict[\"RinHoldcurrent\"].voltagebase_efeature", "stimulus activating synapses \"\"\" stim_definition = protocol_definition[\"stimuli\"] if stim_definition[\"vecstim_random\"] not in [ \"python\",", "or deterministic prefix (str): prefix used in naming responses, features, recordings, etc. syn_locs", "( NrnNetStimStimulusCustom, NrnVecStimStimulusCustom, ) logger = logging.getLogger(__name__) soma_loc = ephys.locations.NrnSeclistCompLocation( name=\"soma\", seclist_name=\"somatic\", sec_index=0,", "Vecstim protocol from definitions. 
Args: protocol_name (str): name of the protocol protocol_definition (dict):", "used as prefix in output filenames syn_locs (list): list of synapse locations stochkv_det", "\"\"\" stim_definition = protocol_definition[\"stimuli\"] stim = NrnNetStimStimulusCustom( syn_locs, stim_definition[\"syn_stop\"], stim_definition[\"syn_nmb_of_spikes\"], stim_definition[\"syn_interval\"], stim_definition[\"syn_start\"], stim_definition[\"syn_noise\"],", "pylint: disable=undefined-loop-variable step_definitions = protocol_definition[\"stimuli\"][\"step\"] if isinstance(step_definitions, dict): step_definitions = [step_definitions] step_stimuli =", "protocol_definitions[\"Main\"][\"pre_protocols\"]: pre_protocols.append(protocols_dict[protocol_name]) protocols_dict[\"Main\"] = RatSSCxMainProtocol( \"Main\", rmp_protocol=protocols_dict[\"RMP\"], rinhold_protocol=protocols_dict[\"RinHoldcurrent\"], thdetect_protocol=protocols_dict[\"ThresholdDetection\"], other_protocols=other_protocols, pre_protocols=pre_protocols, ) else:", "syn_locs (list of ephys.locations.NrnPointProcessLocation): locations of the synapses Returns: emodelrunner.protocols.SweepProtocolCustom: a protocol containing", "stochkv_det (bool): set if stochastic or deterministic prefix (str): prefix used in naming" ]
[]
[ "else: return jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global", "requests_sent[uID] = [] if not (uID in requests): requests[uID] = [] return jsonify({'sent':", "jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat uIp = request.access_route[0] uID", "+ \"<hr \\\\>\" view = view + \"<form action=\\\" \" + \"/req\" +", "<br \\\\><br \\\\>\" view = view + \"<form action=\\\" \" + \"/post\" +", "| <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp = request.access_route[0] uID =", "= request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def", "in clients: clients.append(uID) addChat(\"--- \" + uID + \" Joined the Chat ---\")", "action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\" view = view + \"<h4> To:", "chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not in clients: clients.append(uID) addChat(\"--- \" +", "request.access_route[0] uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c c = c", "#[from, to, status[0sent, 1accepted, 2rejected]] requests = {} requests_sent = {} version =", "view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view = view +", "and displayed. <br \\\\><br \\\\>\" view = view + \"<form action=\\\" \" +", "200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp)", "\\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view = view + i.replace(\"<\", \"\").replace(\">\", \"\")", "view = view + \"A+ v. 
\" + str(version) + \" | <a", "return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\",", "requests_sent uIp = request.access_route[0] uID = getUID(uIp) if \"to\" in request.form: to =", "sendView(): view = \"\" view = view + \"<h3> Send a Chat Request", "+ \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note that only the latest", "uIp = request.access_route[0] uID = getUID(uIp) lis = [] if not (uID in", "+ uIp + \")<br \\\\>\" view = view + \"Refresh the page to", "200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH():", "\": \" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip':", "= view + \"note that only the latest 50 messages are stored and", "= request.form['msg'] addChat(uID + \": \" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"])", "(to in requests): requests[to] = [] requests[to].append(req) if not (uID in requests_sent): requests_sent[uID]", "requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST", "requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def", "50 messages are stored and displayed. 
<br \\\\><br \\\\>\" view = view +", "getUID(uIp) lis = [] if not (uID in requests_sent): requests_sent[uID] = [] if", "view = view + \"Connected as: \" + uID + \" (\" +", "= view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view + \"<input type=\\\"submit\\\">\"", "in request.form: to = request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]] req =", "handle_data(): uIp = request.access_route[0] uID = getUID(uIp) msg = request.form['msg'] addChat(uID + \":", "requests_sent uIp = request.access_route[0] uID = getUID(uIp) lis = [] if not (uID", "i in chat: view = view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\"", "getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat uIp", "only the latest 50 messages are stored and displayed. <br \\\\><br \\\\>\" view", "\\\\>\" view = view + \"<form action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\"", "@app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp)", "unannounceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}),", "__name__ == '__main__': # Bind to PORT if defined, otherwise default to 5000.", "requests = {} requests_sent = {} version = 5 additive = 0 def", "PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)", "= view + \"</form>\" view = view + \"<br \\\\><hr \\\\>\" view =", "view = \"<title>A+</title>\" global c c = c + 1 view = view", "return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat uIp = request.access_route[0]", "chat = [] #[from, to, status[0sent, 1accepted, 2rejected]] requests = {} requests_sent =", "name=\\\"to\\\"><br \\\\>\" view = view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\"", "= view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view = view +", "(uID in requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\",", "return jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests,", "additive = 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def", "\" + \"/req\" + \"\\\" method=\\\"post\\\">\" view = view + \"<h4> To: </h4>\"", "chat uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\",", "stored and displayed. 
<br \\\\><br \\\\>\" view = view + \"<form action=\\\" \"", "\"<h4> To: </h4>\" view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view =", "toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if", "methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def", "= view + \"<h4> To: </h4>\" view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br", "view + \"</form>\" view = view + \"<br \\\\><hr \\\\>\" view = view", "uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c c = c +", "msg = request.form['msg'] addChat(uID + \": \" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\",", "+ \" (\" + uIp + \")<br \\\\>\" view = view + \"Refresh", "getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}),", "def sendView(): view = \"\" view = view + \"<h3> Send a Chat", "= 0 clients = [] chat = [] #[from, to, status[0sent, 1accepted, 2rejected]]", "c c = c + 1 view = view + \"<h3> Public Chat", "Chat ---\") print(\"connection from \" + str(request.remote_addr)) def removeClient(uID): if uID in clients:", "view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\" view =", "addChat(\"--- \" + uID + \" Left the Chat ---\") @app.route('/') def hello():", "= getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat", "+ \"<h4> To: </h4>\" view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view", "Chat ---\") @app.route('/') def hello(): global chat, version uIp = request.access_route[0] uID =", "uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return 
jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def", "= request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def", "\\\\>\" view = view + \"note that only the latest 50 messages are", "= True): global chat, additive if limit: additive = additive + 1 print(\"new", "+ \"</form>\" view = view + \"<hr \\\\>\" return view, 200 if __name__", "if uID not in clients: clients.append(uID) addChat(\"--- \" + uID + \" Joined", "c = c + 1 view = view + \"<h3> Public Chat </h3>\"", "chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0)", "\"to\" in request.form: to = request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]] req", "\\\\>\" view = view + \"<form action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\"", "+ msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id'", "return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat", "jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp", "\" + uID + \" Joined the Chat ---\") print(\"connection from \" +", "True): global chat, additive if limit: additive = additive + 1 print(\"new chat:", "+ \")<br \\\\>\" view = view + \"Refresh the page to access the", "view = \"\" view = view + \"<h3> Send a Chat Request </h3>\"", "+ 1 view = view + \"<h3> Public Chat </h3>\" view = view", "view = view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view =", "getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c c = c + 1 view", "[] requests[to].append(req) if not (uID in requests_sent): requests_sent[uID] = [] 
requests_sent[uID].append(req) return redirect(\"/\",", "view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view =", "view + \"<hr \\\\>\" view = view + \"<form action=\\\" \" + \"/req\"", "msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' :", "code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\",", "\" Left the Chat ---\") @app.route('/') def hello(): global chat, version uIp =", "view + \"note that only the latest 50 messages are stored and displayed.", "---\") print(\"connection from \" + str(request.remote_addr)) def removeClient(uID): if uID in clients: clients.remove(uID)", "import Flask, redirect from flask import request from flask import jsonify import hashlib", "the Chat ---\") @app.route('/') def hello(): global chat, version uIp = request.access_route[0] uID", "\" Joined the Chat ---\") print(\"connection from \" + str(request.remote_addr)) def removeClient(uID): if", "view + \"<hr \\\\>\" return view, 200 if __name__ == '__main__': # Bind", "= view + \"<h3> Public Chat </h3>\" view = view + \"Connected as:", "@app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp)", "\"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view + \"<input type=\\\"submit\\\">\" view = view", "return view, 200 if __name__ == '__main__': # Bind to PORT if defined,", "+ \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view = view + \"<br", "in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error': \"400:", "@app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = \"\" view = view + \"<h3> 
Send", "= request.access_route[0] uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c c =", "+ \"note that only the latest 50 messages are stored and displayed. <br", "requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) if \"to\" in request.form: to", "return redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"])", "request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced():", "view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note that only the", "methods=[\"GET\"]) def announceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) return", "\\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note that only the latest 50 messages", "= [uID, to, 0] if not (to in requests): requests[to] = [] requests[to].append(req)", "POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp =", "requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView():", "view = view + \"<hr \\\\>\" view = view + \"<form action=\\\" \"", "type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\"", "Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp = request.access_route[0]", "request.access_route[0] uID = getUID(uIp) if \"to\" in request.form: to = request.form['to'] # [from,", "To: </h4>\" view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view", "uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): 
global chat uIp = request.access_route[0] uID =", "+ i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br", "[uID, to, 0] if not (to in requests): requests[to] = [] requests[to].append(req) if", "= [] if not (uID in requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID],", "def listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent", "toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not", "+ \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view = view + \"<hr", "200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = \"\" view = view + \"<h3>", "name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view", "addClient(uID) view = \"<title>A+</title>\" global c c = c + 1 view =", "v. 
\" + str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def", "= additive + 1 print(\"new chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\",", "= getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c c = c + 1", "= [] requests[to].append(req) if not (uID in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return", "from \" + str(request.remote_addr)) def removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"--- \"", "getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True):", "def addClient(uID): if uID not in clients: clients.append(uID) addChat(\"--- \" + uID +", "+ str(request.remote_addr)) def removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"--- \" + uID", "+ \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view = view + i.replace(\"<\",", "requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST Request", "+ \"/post\" + \"\\\" method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view", "Flask(__name__) c = 0 clients = [] chat = [] #[from, to, status[0sent,", "+ \"\\\" method=\\\"post\\\">\" view = view + \"<h4> To: </h4>\" view = view", "view + \"A+ v. \" + str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view)", "method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input", "view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view = view +", "= view + \"<hr \\\\>\" return view, 200 if __name__ == '__main__': #", "are stored and displayed. 
<br \\\\><br \\\\>\" view = view + \"<form action=\\\"", "getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True): global chat, additive if", "hello(): global chat, version uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) view =", "getUID(uIp) if \"to\" in request.form: to = request.form['to'] # [from, to, status[0sent, 1accepted,", "Flask, redirect from flask import request from flask import jsonify import hashlib app", "\"</form>\" view = view + \"<hr \\\\>\" return view, 200 if __name__ ==", "import os from flask import Flask, redirect from flask import request from flask", "= view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note that only", "\" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0],", "'__main__': # Bind to PORT if defined, otherwise default to 5000. port =", "latest 50 messages are stored and displayed. <br \\\\><br \\\\>\" view = view", "announceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}),", "def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True): global chat, additive", ": getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp = request.access_route[0] uID", "= [] if not (uID in requests_sent): requests_sent[uID] = [] if not (uID", "the Chat ---\") print(\"connection from \" + str(request.remote_addr)) def removeClient(uID): if uID in", "addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat uIp =", "+ \"<br \\\\><hr \\\\>\" view = view + \"A+ v. 
\" + str(version)", "0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit", "addClient(uID): if uID not in clients: clients.append(uID) addChat(\"--- \" + uID + \"", "requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = \"\" view = view +", "[] chat = [] #[from, to, status[0sent, 1accepted, 2rejected]] requests = {} requests_sent", "redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200", "if uID in clients: clients.remove(uID) addChat(\"--- \" + uID + \" Left the", "getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp = request.access_route[0] uID =", "access the latest messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i", "0 clients = [] chat = [] #[from, to, status[0sent, 1accepted, 2rejected]] requests", "methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests,", "uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return", "view + \"<h4> To: </h4>\" view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\"", "if not (uID in requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}),", "methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) lis", "+ \"<br \\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view", "clients: clients.remove(uID) addChat(\"--- \" + uID + \" Left the Chat ---\") 
@app.route('/')", "+ \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp = request.access_route[0]", "additive if limit: additive = additive + 1 print(\"new chat: \" + toAdd)", "getUID(uIp) msg = request.form['msg'] addChat(uID + \": \" + msg) return redirect(\"/\", code=302)", "\"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\" view = view +", "view = view + \"<br \\\\><hr \\\\>\" view = view + \"A+ v.", "expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp = request.access_route[0] uID", "= view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\" view", "flask import Flask, redirect from flask import request from flask import jsonify import", "a Chat Request </h3>\" view = view + \"<hr \\\\>\" view = view", "jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = \"\" view", "= getUID(uIp) if \"to\" in request.form: to = request.form['to'] # [from, to, status[0sent,", "get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global", "chat uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\",", "+ \"\\\" method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view", "= view + \"<form action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\" view =", "= getUID(uIp) msg = request.form['msg'] addChat(uID + \": \" + msg) return redirect(\"/\",", "---\") @app.route('/') def hello(): global chat, version uIp = request.access_route[0] uID = getUID(uIp)", "view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" 
view = view + \"<input type=\\\"submit\\\">\" view", "= [] #[from, to, status[0sent, 1accepted, 2rejected]] requests = {} requests_sent = {}", "hashlib app = Flask(__name__) c = 0 clients = [] chat = []", "= \"\" view = view + \"<h3> Send a Chat Request </h3>\" view", "the latest 50 messages are stored and displayed. <br \\\\><br \\\\>\" view =", "\" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd)", "+ \" Joined the Chat ---\") print(\"connection from \" + str(request.remote_addr)) def removeClient(uID):", "\" + str(request.remote_addr)) def removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"--- \" +", "view + \"<h3> Public Chat </h3>\" view = view + \"Connected as: \"", "view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view + \"<input", "\"\") + \"<br \\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view =", "(uID in requests_sent): requests_sent[uID] = [] if not (uID in requests): requests[uID] =", "+ \"<h3> Send a Chat Request </h3>\" view = view + \"<hr \\\\>\"", "\\\\><br \\\\>\" view = view + \"<form action=\\\" \" + \"/post\" + \"\\\"", "status[0sent, 1accepted, 2rejected]] requests = {} requests_sent = {} version = 5 additive", "def requestCH(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) if \"to\"", "2rejected]] requests = {} requests_sent = {} version = 5 additive = 0", "Left the Chat ---\") @app.route('/') def hello(): global chat, version uIp = request.access_route[0]", "\" + uID + \" Left the Chat ---\") @app.route('/') def hello(): global", "view = view + \"<form action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\" view", "\"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not in", "return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp = 
request.access_route[0] uID = getUID(uIp) msg =", "\"<hr \\\\>\" view = view + \"<form action=\\\" \" + \"/req\" + \"\\\"", "chat, additive if limit: additive = additive + 1 print(\"new chat: \" +", "in requests): requests[to] = [] requests[to].append(req) if not (uID in requests_sent): requests_sent[uID] =", "</h3>\" view = view + \"<hr \\\\>\" view = view + \"<form action=\\\"", "listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent uIp", "requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = \"\" view =", "= 5 additive = 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\",", "\" + \"/post\" + \"\\\" method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\"", "methods=['POST']) def handle_data(): uIp = request.access_route[0] uID = getUID(uIp) msg = request.form['msg'] addChat(uID", "from flask import Flask, redirect from flask import request from flask import jsonify", "action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\"", "Request </h3>\" view = view + \"<hr \\\\>\" view = view + \"<form", "additive = additive + 1 print(\"new chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\",", "= view + \"</form>\" view = view + \"<hr \\\\>\" return view, 200", "\"<br \\\\><hr \\\\>\" view = view + \"A+ v. 
\" + str(version) +", "200 if __name__ == '__main__': # Bind to PORT if defined, otherwise default", "\"<br \\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view +", "= {} version = 5 additive = 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def", "view + \"<form action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\" view = view", "= request.access_route[0] uID = getUID(uIp) if \"to\" in request.form: to = request.form['to'] #", "uIp = request.access_route[0] uID = getUID(uIp) msg = request.form['msg'] addChat(uID + \": \"", "def handle_data(): uIp = request.access_route[0] uID = getUID(uIp) msg = request.form['msg'] addChat(uID +", "view + \"<form action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\" view = view", "request.form['msg'] addChat(uID + \": \" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def", "\"<input type=\\\"submit\\\">\" view = view + \"</form>\" view = view + \"<hr \\\\>\"", "> 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not in clients: clients.append(uID) addChat(\"---", "@app.route('/') def hello(): global chat, version uIp = request.access_route[0] uID = getUID(uIp) addClient(uID)", "messages are stored and displayed. 
<br \\\\><br \\\\>\" view = view + \"<form", "uIp = request.access_route[0] uID = getUID(uIp) if \"to\" in request.form: to = request.form['to']", "def announceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you':", "400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp = request.access_route[0] uID =", "view = view + \"</form>\" view = view + \"<hr \\\\>\" return view,", "hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True): global chat,", "\\\\><hr \\\\>\" view = view + \"A+ v. \" + str(version) + \"", "view = view + \"<h3> Public Chat </h3>\" view = view + \"Connected", "+ \"</form>\" view = view + \"<br \\\\><hr \\\\>\" view = view +", "messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view", "global chat uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200", "= request.access_route[0] uID = getUID(uIp) msg = request.form['msg'] addChat(uID + \": \" +", "\"/post\" + \"\\\" method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view =", "== '__main__': # Bind to PORT if defined, otherwise default to 5000. 
port", "</h4>\" view = view + \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view +", "\"\").replace(\">\", \"\") + \"<br \\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view", "\\\\>\" for i in chat: view = view + i.replace(\"<\", \"\").replace(\">\", \"\") +", "for i in chat: view = view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br", "\" + uID + \" (\" + uIp + \")<br \\\\>\" view =", "view = view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view = view", "def sendStatus(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) lis =", "[] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view =", "\"/req\" + \"\\\" method=\\\"post\\\">\" view = view + \"<h4> To: </h4>\" view =", "uID + \" (\" + uIp + \")<br \\\\>\" view = view +", "latest messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat:", "uID = getUID(uIp) msg = request.form['msg'] addChat(uID + \": \" + msg) return", "\" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp = request.access_route[0] uID", "uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"])", "app = Flask(__name__) c = 0 clients = [] chat = [] #[from,", "= getUID(uIp) lis = [] if not (uID in requests_sent): requests_sent[uID] = []", "+ \"<input type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view + \"<input type=\\\"submit\\\">\" view =", "return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}),", 
"int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True): global chat, additive if limit: additive", "in clients: clients.remove(uID) addChat(\"--- \" + uID + \" Left the Chat ---\")", "'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp = request.access_route[0]", "limit = True): global chat, additive if limit: additive = additive + 1", "uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c c", "+ 1 print(\"new chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive", "jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent", "view + \"<h3> Send a Chat Request </h3>\" view = view + \"<hr", "\"A+ v. \" + str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST'])", "view = view + \"</form>\" view = view + \"<br \\\\><hr \\\\>\" view", "os from flask import Flask, redirect from flask import request from flask import", "1 print(\"new chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive >", "\"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note that only the latest 50", "uID + \" Left the Chat ---\") @app.route('/') def hello(): global chat, version", "not (uID in requests_sent): requests_sent[uID] = [] if not (uID in requests): requests[uID]", "global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) if \"to\" in request.form:", "jsonify import hashlib app = Flask(__name__) c = 0 clients = [] chat", "code=302) else: return jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus():", "view + \"Refresh the page to 
access the latest messages.\" view = view", "lis = [] if not (uID in requests_sent): requests_sent[uID] = [] if not", "\"<form action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\" view = view + \"<h4>", "# Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT',", "\\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note", "@app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"])", "def addChat(toAdd, limit = True): global chat, additive if limit: additive = additive", "\"\")) def addChat(toAdd, limit = True): global chat, additive if limit: additive =", "= [] chat = [] #[from, to, status[0sent, 1accepted, 2rejected]] requests = {}", "clients: clients.append(uID) addChat(\"--- \" + uID + \" Joined the Chat ---\") print(\"connection", "= view + \"<hr \\\\>\" view = view + \"<form action=\\\" \" +", "flask import jsonify import hashlib app = Flask(__name__) c = 0 clients =", "addChat(toAdd, limit = True): global chat, additive if limit: additive = additive +", "not in clients: clients.append(uID) addChat(\"--- \" + uID + \" Joined the Chat", "if not (uID in requests_sent): requests_sent[uID] = [] if not (uID in requests):", "type=\\\"text\\\" name=\\\"to\\\"><br \\\\>\" view = view + \"<input type=\\\"submit\\\">\" view = view +", "Public Chat </h3>\" view = view + \"Connected as: \" + uID +", "def hello(): global chat, version uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) view", "view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\"", "to, status[0sent, 1accepted, 2rejected]] requests = {} requests_sent = {} version = 5", "global chat, additive if limit: additive = additive + 1 print(\"new chat: \"", "view = view + \"Refresh the page to access 
the latest messages.\" view", "@app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) addClient(uID)", "addChat(uID + \": \" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip():", "+ \"A+ v. \" + str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post',", "+ uID + \" (\" + uIp + \")<br \\\\>\" view = view", "return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent uIp =", "[from, to, status[0sent, 1accepted, 2rejected]] req = [uID, to, 0] if not (to", "displayed. <br \\\\><br \\\\>\" view = view + \"<form action=\\\" \" + \"/post\"", "to = request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]] req = [uID, to,", "'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = \"\" view = view", "def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit =", "= view + \"Refresh the page to access the latest messages.\" view =", "2rejected]] req = [uID, to, 0] if not (to in requests): requests[to] =", "methods=['POST']) def requestCH(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) if", "Bind to PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT', 5000))", "to, status[0sent, 1accepted, 2rejected]] req = [uID, to, 0] if not (to in", "req = [uID, to, 0] if not (to in requests): requests[to] = []", "print(\"new chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50):", "Chat Request </h3>\" view = view + \"<hr \\\\>\" view = view +", "if __name__ == '__main__': # Bind to PORT if defined, otherwise default to", "= view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view = view", "c = 0 clients = [] chat = [] #[from, to, status[0sent, 1accepted,", "+ \"<form action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\" view = view +", "view = view + \"<h4> To: </h4>\" view = view + \"<input type=\\\"text\\\"", "the page to access the latest messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br", "requestCH(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) if \"to\" in", "[] if not (uID in requests_sent): requests_sent[uID] = [] if not (uID in", "= [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view", "global c c = c + 1 view = view + \"<h3> Public", "= view + \"Connected as: \" + uID + \" (\" + uIp", "\")<br \\\\>\" view = view + \"Refresh the page to access the latest", "\\\\>\" view = view + \"A+ v. 
\" + str(version) + \" |", "\\\\>\" view = view + \"<input type=\\\"submit\\\">\" view = view + \"</form>\" view", "@app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID)", "in requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"])", "+ uID + \" Joined the Chat ---\") print(\"connection from \" + str(request.remote_addr))", "request.access_route[0] uID = getUID(uIp) msg = request.form['msg'] addChat(uID + \": \" + msg)", "(\" + uIp + \")<br \\\\>\" view = view + \"Refresh the page", "requests[to] = [] requests[to].append(req) if not (uID in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req)", "= 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd,", "to PORT if defined, otherwise default to 5000. 
port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0',", "def unannounceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID) return jsonify({'you':", "\"Refresh the page to access the latest messages.\" view = view + \"<br", "+ \" Left the Chat ---\") @app.route('/') def hello(): global chat, version uIp", "redirect from flask import request from flask import jsonify import hashlib app =", "(uID in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error':", "limit: additive = additive + 1 print(\"new chat: \" + toAdd) toAdd =", "uID + \" Joined the Chat ---\") print(\"connection from \" + str(request.remote_addr)) def", "+ toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def", "if \"to\" in request.form: to = request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]]", "= request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]] req = [uID, to, 0]", "def get_my_ip(): return jsonify({'ip': request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem():", "to access the latest messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for", "uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global", "0] if not (to in requests): requests[to] = [] requests[to].append(req) if not (uID", "= \"<title>A+</title>\" global c c = c + 1 view = view +", "if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not in clients: clients.append(uID)", "\"<form action=\\\" \" + \"/post\" + \"\\\" method=\\\"post\\\">\" view = view + \"<input", "version = 5 additive = 0 def getUID(ip): return 
hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return", "= view + \"<br \\\\><hr \\\\>\" view = view + \"A+ v. \"", "= getUID(uIp) removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients':", "{} version = 5 additive = 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip):", "+ \"<h3> Public Chat </h3>\" view = view + \"Connected as: \" +", "1accepted, 2rejected]] requests = {} requests_sent = {} version = 5 additive =", "1 view = view + \"<h3> Public Chat </h3>\" view = view +", "= request.access_route[0] uID = getUID(uIp) lis = [] if not (uID in requests_sent):", "method=\\\"post\\\">\" view = view + \"<h4> To: </h4>\" view = view + \"<input", "as: \" + uID + \" (\" + uIp + \")<br \\\\>\" view", "200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent uIp = request.access_route[0] uID =", "request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]] req = [uID, to, 0] if", "additive + 1 print(\"new chat: \" + toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\")", "= c + 1 view = view + \"<h3> Public Chat </h3>\" view", "from flask import request from flask import jsonify import hashlib app = Flask(__name__)", "@app.route('/post', methods=['POST']) def handle_data(): uIp = request.access_route[0] uID = getUID(uIp) msg = request.form['msg']", "+ str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp", "global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) lis = [] if", "\\\\>\" view = view + \"Refresh the page to access the latest messages.\"", "= [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST Request expected\"}),", "from flask import jsonify 
import hashlib app = Flask(__name__) c = 0 clients", "clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent uIp = request.access_route[0] uID", "Joined the Chat ---\") print(\"connection from \" + str(request.remote_addr)) def removeClient(uID): if uID", "\"<input type=\\\"submit\\\">\" view = view + \"</form>\" view = view + \"<br \\\\><hr", "\"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view = view + i.replace(\"<\", \"\").replace(\">\",", "to, 0] if not (to in requests): requests[to] = [] requests[to].append(req) if not", "chat: view = view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view =", "print(\"connection from \" + str(request.remote_addr)) def removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"---", "uID = getUID(uIp) if \"to\" in request.form: to = request.form['to'] # [from, to,", "toAdd) toAdd = toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID):", "= view + \"<form action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\" view =", "def removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"--- \" + uID + \"", "view = view + \"<hr \\\\>\" return view, 200 if __name__ == '__main__':", "+ \"<form action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\" view = view +", "{} requests_sent = {} version = 5 additive = 0 def getUID(ip): return", "addChat(\"--- \" + uID + \" Joined the Chat ---\") print(\"connection from \"", "+ \"Refresh the page to access the latest messages.\" view = view +", "50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not in clients: clients.append(uID) addChat(\"--- \"", "\"<h3> Send a Chat Request </h3>\" view = view + \"<hr \\\\>\" view", "return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200 @app.route(\"/send\", methods=[\"GET\"]) def sendView(): view = 
\"\"", "view = view + \"note that only the latest 50 messages are stored", "uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"])", "@app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global", "</h3>\" view = view + \"Connected as: \" + uID + \" (\"", "href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp = request.access_route[0] uID = getUID(uIp) msg", "view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" view = view + \"note that", "= toAdd.replace(\"<script>\", \"\").replace(\"</script>\", \"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID", "[] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST Request expected\"}), 400", "+ \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view + \"<input type=\\\"submit\\\">\" view = view", "[] #[from, to, status[0sent, 1accepted, 2rejected]] requests = {} requests_sent = {} version", "import request from flask import jsonify import hashlib app = Flask(__name__) c =", "requests_sent): requests_sent[uID] = [] if not (uID in requests): requests[uID] = [] return", "\"Connected as: \" + uID + \" (\" + uIp + \")<br \\\\>\"", "+ \"/req\" + \"\\\" method=\\\"post\\\">\" view = view + \"<h4> To: </h4>\" view", "request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem():", "\"<title>A+</title>\" global c c = c + 1 view = view + \"<h3>", "requests[to].append(req) if not (uID in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302)", "\"<h3> 
Public Chat </h3>\" view = view + \"Connected as: \" + uID", "requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) lis = [] if not", "in chat: view = view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view", "\"\\\" method=\\\"post\\\">\" view = view + \"<input type=\\\"text\\\" name=\\\"msg\\\">\" view = view +", "# [from, to, status[0sent, 1accepted, 2rejected]] req = [uID, to, 0] if not", "view + \"Connected as: \" + uID + \" (\" + uIp +", "requests): requests[to] = [] requests[to].append(req) if not (uID in requests_sent): requests_sent[uID] = []", "that only the latest 50 messages are stored and displayed. <br \\\\><br \\\\>\"", "i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\"", "str(request.remote_addr)) def removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"--- \" + uID +", "\" (\" + uIp + \")<br \\\\>\" view = view + \"Refresh the", "jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST']) def requestCH(): global requests, requests_sent uIp = request.access_route[0]", "version uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global c", "<a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp = request.access_route[0] uID = getUID(uIp)", "removeClient(uID) return jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}), 200", "view + i.replace(\"<\", \"\").replace(\">\", \"\") + \"<br \\\\>\" view = view + \"<br", "sendStatus(): global requests, requests_sent uIp = request.access_route[0] uID = getUID(uIp) lis = []", "\"note that only the latest 50 messages are stored and displayed. 
<br \\\\><br", "import hashlib app = Flask(__name__) c = 0 clients = [] chat =", "200 @app.route(\"/unannounce\", methods=[\"GET\"]) def unannounceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp)", "= view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in chat: view = view", "\"</form>\" view = view + \"<br \\\\><hr \\\\>\" view = view + \"A+", "not (uID in requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received': requests[uID]}), 200", "+ \": \" + msg) return redirect(\"/\", code=302) @app.route(\"/get_my_ip\", methods=[\"GET\"]) def get_my_ip(): return", "uID not in clients: clients.append(uID) addChat(\"--- \" + uID + \" Joined the", "\" + str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data():", "methods=[\"GET\"]) def unannounceThem(): global chat uIp = request.access_route[0] uID = getUID(uIp) removeClient(uID) return", "redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def", "\"\") if(additive > 50): chat.pop(0) chat.append(toAdd) def addClient(uID): if uID not in clients:", "view + \"<br \\\\><hr \\\\>\" view = view + \"A+ v. 
\" +", "= {} requests_sent = {} version = 5 additive = 0 def getUID(ip):", "\"\\\" method=\\\"post\\\">\" view = view + \"<h4> To: </h4>\" view = view +", "return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True): global chat, additive if limit:", "view = view + \"<h3> Send a Chat Request </h3>\" view = view", "uIp + \")<br \\\\>\" view = view + \"Refresh the page to access", "requests_sent = {} version = 5 additive = 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest()", "clients.remove(uID) addChat(\"--- \" + uID + \" Left the Chat ---\") @app.route('/') def", "import jsonify import hashlib app = Flask(__name__) c = 0 clients = []", "clients = [] chat = [] #[from, to, status[0sent, 1accepted, 2rejected]] requests =", "uID in clients: clients.remove(uID) addChat(\"--- \" + uID + \" Left the Chat", "Chat </h3>\" view = view + \"Connected as: \" + uID + \"", "the latest messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\" for i in", "\\\\>\" return view, 200 if __name__ == '__main__': # Bind to PORT if", "= Flask(__name__) c = 0 clients = [] chat = [] #[from, to,", "return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def getUN(ip): return int(str(ip).replace(\".\", \"\")) def addChat(toAdd, limit = True): global", "view, 200 if __name__ == '__main__': # Bind to PORT if defined, otherwise", "page to access the latest messages.\" view = view + \"<br \\\\>-----------------------------------------------------------------------<br \\\\>\"", "\"<hr \\\\>\" return view, 200 if __name__ == '__main__': # Bind to PORT", "= view + \"A+ v. 
\" + str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\"", "[] if not (uID in requests): requests[uID] = [] return jsonify({'sent': requests_sent[uID], 'received':", "not (uID in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return", "if not (to in requests): requests[to] = [] requests[to].append(req) if not (uID in", "chat.append(toAdd) def addClient(uID): if uID not in clients: clients.append(uID) addChat(\"--- \" + uID", "not (to in requests): requests[to] = [] requests[to].append(req) if not (uID in requests_sent):", "request.form: to = request.form['to'] # [from, to, status[0sent, 1accepted, 2rejected]] req = [uID,", "chat, version uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\" global", "status[0sent, 1accepted, 2rejected]] req = [uID, to, 0] if not (to in requests):", "removeClient(uID): if uID in clients: clients.remove(uID) addChat(\"--- \" + uID + \" Left", "Send a Chat Request </h3>\" view = view + \"<hr \\\\>\" view =", "str(version) + \" | <a href=\\\"https://raw.githubusercontent.com/jonnelafin/A-/master/LICENSE\\\">LICENSE</a>\" return(view) @app.route('/post', methods=['POST']) def handle_data(): uIp =", "jsonify({'you': uID}), 200 @app.route(\"/list\", methods=[\"GET\"]) def listAnnounced(): return jsonify({'clients': clients}), 200 @app.route(\"/req\", methods=['POST'])", "type=\\\"submit\\\">\" view = view + \"</form>\" view = view + \"<br \\\\><hr \\\\>\"", "+ uID + \" Left the Chat ---\") @app.route('/') def hello(): global chat,", "clients.append(uID) addChat(\"--- \" + uID + \" Joined the Chat ---\") print(\"connection from", "request.access_route[0], 'id' : getUID(request.access_route[0])}), 200 @app.route(\"/announce\", methods=[\"GET\"]) def announceThem(): global chat uIp =", "5 additive = 0 def getUID(ip): return hashlib.sha256(str(ip).encode(\"utf8\")).hexdigest() def 
getUN(ip): return int(str(ip).replace(\".\", \"\"))", "+ \"Connected as: \" + uID + \" (\" + uIp + \")<br", "uID = getUID(uIp) lis = [] if not (uID in requests_sent): requests_sent[uID] =", "if not (uID in requests_sent): requests_sent[uID] = [] requests_sent[uID].append(req) return redirect(\"/\", code=302) else:", "in requests_sent): requests_sent[uID] = [] if not (uID in requests): requests[uID] = []", "view = view + \"<form action=\\\" \" + \"/req\" + \"\\\" method=\\\"post\\\">\" view", "methods=[\"GET\"]) def sendView(): view = \"\" view = view + \"<h3> Send a", "request from flask import jsonify import hashlib app = Flask(__name__) c = 0", "+ \"<hr \\\\>\" return view, 200 if __name__ == '__main__': # Bind to", "\"\" view = view + \"<h3> Send a Chat Request </h3>\" view =", "\"400: POST Request expected\"}), 400 @app.route(\"/status\", methods=[\"GET\"]) def sendStatus(): global requests, requests_sent uIp", "view + \"</form>\" view = view + \"<hr \\\\>\" return view, 200 if", "requests_sent[uID].append(req) return redirect(\"/\", code=302) else: return jsonify({'error': \"400: POST Request expected\"}), 400 @app.route(\"/status\",", "flask import request from flask import jsonify import hashlib app = Flask(__name__) c", "request.access_route[0] uID = getUID(uIp) lis = [] if not (uID in requests_sent): requests_sent[uID]", "type=\\\"submit\\\">\" view = view + \"</form>\" view = view + \"<hr \\\\>\" return", "1accepted, 2rejected]] req = [uID, to, 0] if not (to in requests): requests[to]", "c + 1 view = view + \"<h3> Public Chat </h3>\" view =", "global chat uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) return jsonify({'you': uID}), 200", "global chat, version uIp = request.access_route[0] uID = getUID(uIp) addClient(uID) view = \"<title>A+</title>\"", "= view + \"<h3> Send a Chat Request </h3>\" view = view +", "if limit: additive = additive + 1 print(\"new chat: \" + toAdd) toAdd" ]
[ "= [-1, 0] shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i,", "[16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0]", "KIND, either express or implied. # See the License for the specific language", "output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self,", "Unless required by applicable law or agreed to in writing, software # distributed", "0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype", "[np.float32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) # scala-----------------------------------------------------------------", "language governing permissions and # limitations under the License. import torch import numpy", "np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype", "[16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list =", "self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [32,", "common_utils import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor", "[[[np.float16, i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list =", "a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required", "format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 7]]] for i in", "input1, input2): output = torch.ne(input1, input2) 
output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return", "= cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1,", "[np.int32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list", "def cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output", "cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1,", "device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [32, 3, 3,", "0] shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for i in format_list]", "this file except in compliance with the License. # You may obtain a", "def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [64, 7]]]", "i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device):", "i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list =", "and # limitations under the License. import torch import numpy as np import", "3]], [np.float16, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) def", "governing permissions and # limitations under the License. 
import torch import numpy as", "scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i,", "def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [448, 1]],", "ANY KIND, either express or implied. # See the License for the specific", "output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\")", "format_list = [-1, 0, 3] shape_format = [[[np.float16, i, [32, 3, 3, 3]],", "npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1,", "self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16]],", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format", "[np.float16, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list", "return output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def", "3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1,", "[[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format)", "torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out", "= [-1, 0] shape_format = [[[np.float32, i, [64, 24, 38]]] for i in", 
"self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64,", "= create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 =", "[448, 1]], [np.float32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "[-1, 0] shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for i in", "input1, input2, out): torch.ne(input1, input2, out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return", "1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0]", "test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [32, 3, 3,", "def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [448, 1]],", "shape_format = [[[np.float32, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self,", "import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2):", "output = torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2):", "i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list", "self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0]", "[32, 3, 3, 3]]] 
for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list", "output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out) output =", "= create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool)", "def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [32,", "= [[[np.int32, i, [16]], [np.int32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "[-1, 0] shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format =", "shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]] for", "[[[np.float16, i, [16]], [np.float16, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self,", "3]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0,", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float32,", "= self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp", "OF ANY KIND, either express or implied. 
# See the License for the", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3]", "cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp,", "input2, out): torch.ne(input1, input2, out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return output", "input1, input2): output = torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output def npu_op_exec(self,", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float32,", "640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1,", "input2) output = output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2): output = torch.ne(input1,", "obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless", "[16]], [np.float32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list", "[[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]] for i in", "shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for i in format_list] self.not_equal_result(shape_format)", "0, 3] shape_format = [[[np.float32, i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "rights reserved. 
# # Licensed under the BSD 3-Clause License (the \"License\"); #", "You may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause #", "npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output,", "0] shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for i in format_list]", "640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0]", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format =", "0, 3] shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640,", "shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]] for", "3] shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32, 3,", "= [-1, 0] shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for i", "input2): output = torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output def npu_op_exec(self, input1,", "scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1,", "= self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16,", "cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3)", "3]]] for i in format_list] 
self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(), except_for=\"cpu\") if __name__ == \"__main__\":", "= [-1, 0, 3] shape_format = [[[np.float16, i, 18]] for i in format_list]", "= copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i,", "npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def", "i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device):", "def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [32, 3,", "640, 640]], [np.int32, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def", "npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar)", "640]], [np.int32, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3] shape_format", "3]]] for i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list =", "test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3] shape_format = 
[[[np.float16, i, [32, 3,", "100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output =", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.int32,", "specific language governing permissions and # limitations under the License. import torch import", "output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2)", "return output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out) output = out.to(\"cpu\")", "[16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list =", "device): format_list = [-1, 0] shape_format = [[[np.int32, i, [32, 3, 3, 3]],", "0, 3] shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32,", "self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64,", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3] shape_format", "= self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list", "of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable", "def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i, 18]]", "0] shape_format = [[[np.float32, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "return output def 
npu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.to(\"cpu\")", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format =", "2020, Huawei Technologies.All rights reserved. # # Licensed under the BSD 3-Clause License", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp,", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32,", "= output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out) output", "3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0]", "npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1,", "device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 7]]] for i", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1,", "out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def 
not_equal_scalar_result(self, shape_format): for", "copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by", "required by applicable law or agreed to in writing, software # distributed under", "self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp =", "i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(), except_for=\"cpu\")", "applicable law or agreed to in writing, software # distributed under the License", "output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output =", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32,", "npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16]],", "def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [32, 3,", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i,", "[-1, 0] shape_format = [[[np.float32, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format)", "i, [448, 1]], [np.float16, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def", "0] shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for i in format_list]", "3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list =", "or agreed to in writing, software # distributed under the License is distributed", "npu_output) self.assertRtolEqual(cpu_output, 
npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format", "= [-1, 0] shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16,", "= [-1, 0] shape_format = [[[np.float32, i, [64, 7]]] for i in format_list]", "640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0,", "self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16,", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "device): format_list = [-1, 0] shape_format = [[[np.float32, i, [448, 1]], [np.float32, i,", "640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0,", "the License. import torch import numpy as np import copy from common_utils import", "as np import copy from common_utils import TestCase, run_tests from common_device_type import dtypes,", "= [-1, 0] shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]]", "24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1,", "= cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1,", "3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(), except_for=\"cpu\") if __name__", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format =", "cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, 
npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2,", "[64, 24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list =", "0] shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for i in format_list]", "0] shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for i in format_list]", "npu_output_inp) def not_equal_result(self, shape_format): for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0,", "device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [16, 640, 640]],", "scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item in", "# https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to in", "def npu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.to(\"cpu\") output =", "test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [32, 3, 3,", "output = torch.ne(input1, input2) output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def", "writing, software # distributed under the License is distributed on an \"AS IS\"", "[32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self,", "License. 
# You may obtain a copy of the License at # #", "[[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]] for i in", "cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1,", "= self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp", "def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output", "= [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]] for i", "compliance with the License. # You may obtain a copy of the License", "= [-1, 0] shape_format = [[[np.float16, i, [64, 7]]] for i in format_list]", "[np.int32, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device):", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format =", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i,", "input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output =", "# # Licensed under the BSD 3-Clause License (the \"License\"); # you may", "scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1,", "if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output =", "scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) 
self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self,", "self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output)", "test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16, 640, 640]],", "# scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16,", "for the specific language governing permissions and # limitations under the License. import", "output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out)", "npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out)", "= cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2)", "for i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1,", "[-1, 0, 3] shape_format = [[[np.float16, i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format)", "0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output", "640]], [np.float32, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def 
test_not_equal_shape_format_fp16_4d(self,", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format =", "[np.float32, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device):", "1]], [np.float16, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device):", "= [[[np.float16, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "[32, 3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]] for i in", "Licensed under the BSD 3-Clause License (the \"License\"); # you may not use", "from util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output = torch.ne(input1,", "i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def", "npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1", "npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1", "i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list", "test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 7]]] for", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.int32,", "= [-1, 0, 3] shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32,", "not use this file except in compliance with the License. 
# You may", "[16, 640, 640]], [np.float16, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format)", "at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed", "i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list =", "output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output", "3, 3]], [np.float16, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format)", "npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2)", "def not_equal_result(self, shape_format): for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)", "640]], [np.float16, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self,", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16,", "item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1],", "shape_format = [[[np.float32, i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device):", "[32, 3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]] for i in", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list =", "3] 
shape_format = [[[np.float32, i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self,", "= [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for i in format_list]", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3] shape_format =", "[-1, 0, 3] shape_format = [[[np.float32, i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format)", "dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3] shape_format =", "test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 24, 38]]]", "[[[np.float32, i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list =", "[448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1,", "[-1, 0] shape_format = [[[np.float16, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format)", "# you may not use this file except in compliance with the License.", "copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output =", "[[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]] for", "device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 24, 38]]] for", "device): format_list = [-1, 0] shape_format = [[[np.float16, i, [448, 1]], [np.float16, i,", "test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format = 
[[[np.float32, i, [16]], [np.float32, i,", "def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [16,", "agreed to in writing, software # distributed under the License is distributed on", "3] shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]]", "run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase): def", "return output def not_equal_scalar_result(self, shape_format): for item in shape_format: scalar = np.random.uniform(0, 100)", "[18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0]", "= out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format): for item in", "for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(), except_for=\"cpu\") if __name__ == \"__main__\": run_tests()", "npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1,", "[[[np.float32, i, [64, 24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device):", "(the \"License\"); # you may not use this file except in compliance with", "3, 3]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1,", "= input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1,", "self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [32,", "format_list] 
self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i,", "npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device):", "format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 24, 38]]] for i", "np import copy from common_utils import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests", "common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1,", "test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16]], [np.int32, i,", "device): format_list = [-1, 0] shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]]", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32,", "cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3)", "shape_format): for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2", "# Unless required by applicable law or agreed to in writing, software #", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float16,", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i,", "device): format_list = [-1, 0] shape_format = [[[np.int32, i, [448, 1]], [np.int32, i,", "by 
applicable law or agreed to in writing, software # distributed under the", "0] shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for i", "BSD 3-Clause License (the \"License\"); # you may not use this file except", "def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1,", "[-1, 0, 3] shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i,", "test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i, 18]] for", "from common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self,", "= [-1, 0] shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16,", "= [[[np.float16, i, [16]], [np.float16, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i,", "under the License. 
import torch import numpy as np import copy from common_utils", "= [[[np.float32, i, [16]], [np.float32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def", "format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [18]]] for i in", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format", "input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output =", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float32,", "[np.float32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list =", "input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self,", "output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output =", "test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [32, 3, 3,", "input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [448,", "file except in compliance with the License. 
# You may obtain a copy", "def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16]], [np.float16,", "self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp =", "cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1,", "in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0,", "shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for i in", "[[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format)", "= create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 =", "format_list = [-1, 0] shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for", "shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format)", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i,", "format_list = [-1, 0] shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i,", "= torch.ne(input1, input2) output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self,", "device): format_list = [-1, 0] shape_format = [[[np.float16, i, [64, 7]]] for i", "= [-1, 0] shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for i", "License for the specific language governing permissions and # limitations under the License.", "shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format)", "1]], [np.float32, i, [448, 1]]] 
for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device):", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32,", "to in writing, software # distributed under the License is distributed on an", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3] shape_format =", "implied. # See the License for the specific language governing permissions and #", "format_list = [-1, 0] shape_format = [[[np.float16, i, [64, 7]]] for i in", "the BSD 3-Clause License (the \"License\"); # you may not use this file", "[-1, 0] shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]] for", "\"License\"); # you may not use this file except in compliance with the", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format =", "in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3]", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16,", "output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format): for item", "device): format_list = [-1, 0] shape_format = [[[np.float32, i, [32, 3, 3, 3]]]", "= [-1, 0] shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]]", "format_list = [-1, 0] shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448,", "output = output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format): for item in shape_format: scalar", "License at # # 
https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or", "TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase):", "18]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0,", "[[[np.float16, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self,", "not_equal_scalar_result(self, shape_format): for item in shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 =", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i,", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format =", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i,", "def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16]], [np.int32,", "= self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3]", "npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item in shape_format: cpu_input1,", "or implied. 
# See the License for the specific language governing permissions and", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i,", "[np.float16, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device):", "cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out =", "out): torch.ne(input1, input2, out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def", "[np.float16, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list =", "output def npu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.to(\"cpu\") output", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list =", "shape_format = [[[np.float16, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self,", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3] shape_format =", "[[[np.int32, i, [16]], [np.int32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self,", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i,", "[[[np.float32, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list", "[16, 640, 640]], [np.float32, i, [16, 640, 
640]]] for i in format_list] self.not_equal_result(shape_format)", "format_list = [-1, 0] shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448,", "in writing, software # distributed under the License is distributed on an \"AS", "3-Clause License (the \"License\"); # you may not use this file except in", "0, 3] shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32,", "self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format =", "i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list =", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format", "[16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0]", "[-1, 0] shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for i in", "cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16:", "input2, out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format):", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "in shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format", "test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3] shape_format = 
[[[np.float32, i, [18]]] for", "shape_format = [[[np.int32, i, [448, 1]], [np.int32, i, [448, 1]]] for i in", "= [-1, 0, 3] shape_format = [[[np.float32, i, [18]]] for i in format_list]", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [448,", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format", "[-1, 0, 3] shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16,", "# # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to", "self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [64,", "you may not use this file except in compliance with the License. #", "TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.numpy().astype(np.int32) return", "cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self,", "i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1,", "# You may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause", "[-1, 0] shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for", "shape_format = [[[np.float32, i, [64, 24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format", 
"self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [32,", "# limitations under the License. import torch import numpy as np import copy", "input2): input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1,", "test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [16, 640,", "import copy from common_utils import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from", "= [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]] for i", "shape_format): for item in shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0],", "format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [32, 3, 3, 3]],", "= [[[np.float32, i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list", "cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16:", "[np.float32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list", "[32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list", "shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 =", "use this file except in compliance with the License. 
# You may obtain", "format_list = [-1, 0, 3] shape_format = [[[np.float16, i, 18]] for i in", "= [[[np.float16, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device):", "self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3] shape_format =", "0] shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]] for i", "self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [16]],", "self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def", "shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32, 3, 3,", "[32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(), except_for=\"cpu\") if", "cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3", "create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32)", "# Licensed under the BSD 3-Clause License (the \"License\"); # you may not", "copy from common_utils import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test", "[16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0]", "def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out) output = 
out.to(\"cpu\") output =", "device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i, 18]] for i", "def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i, [32,", "[16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list =", "7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0]", "640, 640]], [np.float32, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def", "640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1,", "def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16, 640,", "test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [448, 1]], [np.float32,", "1]], [np.int32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device):", "[np.int32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(),", "format_list = [-1, 0] shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for", "= [-1, 0] shape_format = [[[np.float16, i, [32, 3, 3, 3]]] for i", "[-1, 0] shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640,", "shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for i in format_list] self.not_equal_result(shape_format)", "cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out = self.npu_op_exec_out(npu_input1, scalar,", "= [-1, 0, 3] shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16,", "npu_output_out) 
self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float16,", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_3d(self, device): format_list = [-1, 0, 3] shape_format", "shape_format = [[[np.float16, i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device):", "create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32)", "for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 =", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3]", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format", "[448, 1]], [np.int32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "permissions and # limitations under the License. 
import torch import numpy as np", "= self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for", "torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output =", "3, 3]], [np.int32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format)", "output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2, out):", "npu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.to(\"cpu\") output = output.numpy().astype(np.int32)", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format =", "self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item in shape_format: cpu_input1, npu_input1", "self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item", "npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item in shape_format: cpu_input1, npu_input1 =", "test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [64, 7]]] for", "3]], [np.int32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual,", "# # Unless required by applicable law or agreed to in writing, software", "def not_equal_scalar_result(self, shape_format): for item in 
shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1", "self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i,", "= [[[np.float32, i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]] for i", "express or implied. # See the License for the specific language governing permissions", "self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [448,", "= [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for i in format_list]", "3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]] for i in format_list]", "[448, 1]], [np.float16, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self,", "i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]] for i in format_list]", "format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i,", "def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 24,", "input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2,", "[-1, 0] shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640,", "shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32, 3, 3,", "either express or implied. 
# See the License for the specific language governing", "= copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar)", "[[[np.float16, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list", "[[[np.float32, i, [16]], [np.float32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self,", "input2) output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1, input2):", "cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out", "self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp =", "[[[np.float32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self,", "License. 
import torch import numpy as np import copy from common_utils import TestCase,", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "3] shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32, 3,", "device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]]", "= [-1, 0] shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for i", "3, 3]], [np.float32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format)", "torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2): output =", "i, [32, 3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]] for i", "== torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar)", "3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]] for i in format_list]", "def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16, 640,", "[-1, 0] shape_format = [[[np.float32, i, [64, 24, 38]]] for i in format_list]", "0] shape_format = [[[np.float32, i, [64, 24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format)", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i,", "== torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2) npu_output", "the License. 
# You may obtain a copy of the License at #", "device): format_list = [-1, 0] shape_format = [[[np.float16, i, [32, 3, 3, 3]]]", "may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # #", "https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to in writing,", "100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype ==", "i, [448, 1]], [np.float32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def", "[[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]] for", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1,", "0, 3] shape_format = [[[np.float16, i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class", "= [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]]", "1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0]", "i, [16]], [np.float32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device):", "shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]] for", "100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype ==", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format =", 
"for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3]", "format_list = [-1, 0] shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448,", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format", "[-1, 0] shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for i in", "i, [32, 3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]] for i", "format_list = [-1, 0] shape_format = [[[np.int32, i, [16]], [np.int32, i, [16]]] for", "i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1,", "shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]] for i in", "= [[[np.float16, i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list", "copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output", "output def not_equal_scalar_result(self, shape_format): for item in shape_format: scalar = np.random.uniform(0, 100) cpu_input1,", "torch.ne(input1, input2) output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1,", "format_list = [-1, 0] shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32,", "3] shape_format = [[[np.float16, i, 18]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_1d(self,", "self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item in shape_format:", "output = output.numpy().astype(np.int32) return 
output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32)", "import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output", "Huawei Technologies.All rights reserved. # # Licensed under the BSD 3-Clause License (the", "= output.to(\"cpu\") output = output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output", "0] shape_format = [[[np.float16, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2)", "i, [448, 1]], [np.int32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def", "[-1, 0] shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for i in", "= [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]]", "i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list", "with the License. # You may obtain a copy of the License at", "(c) 2020, Huawei Technologies.All rights reserved. 
# # Licensed under the BSD 3-Clause", "output = output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2): output = torch.ne(input1, input2)", "npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 =", "create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output =", "= self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp)", "format_list = [-1, 0] shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for", "= self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp)", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32,", "i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0,", "output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format): for item in shape_format: scalar = np.random.uniform(0,", "class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.numpy().astype(np.int32)", "= [-1, 0] shape_format = [[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]]", "= np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if", "cpu_input1 = 
cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, scalar) npu_output = self.npu_op_exec(npu_input1, scalar) npu_output_out =", "device): format_list = [-1, 0, 3] shape_format = [[[np.float16, i, [32, 3, 3,", "0] shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]]", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float16,", "Technologies.All rights reserved. # # Licensed under the BSD 3-Clause License (the \"License\");", "[[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]] for i in", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32,", "cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output def", "Copyright (c) 2020, Huawei Technologies.All rights reserved. 
# # Licensed under the BSD", "format_list = [-1, 0] shape_format = [[[np.float32, i, [32, 3, 3, 3]]] for", "i, [16, 640, 640]], [np.int32, i, [16, 640, 640]]] for i in format_list]", "import numpy as np import copy from common_utils import TestCase, run_tests from common_device_type", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "= output.numpy().astype(np.int32) return output def cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return", "instantiate_device_type_tests from util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output =", "return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32)", "[[[np.float32, i, [448, 1]], [np.float32, i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format)", "self.cpu_op_exec(cpu_input1, cpu_input2) npu_output = self.npu_op_exec(npu_input1, npu_input2) npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp =", "npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format):", "shape_format = [[[np.float32, i, [16]], [np.float32, i, [16]]] for i in format_list] self.not_equal_result(shape_format)", "cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1, cpu_input2)", "[-1, 0, 3] shape_format = [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i,", "[32, 3, 3, 3]], [np.int32, i, [32, 3, 3, 3]]] for i in", "in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.int32,", 
"reserved. # # Licensed under the BSD 3-Clause License (the \"License\"); # you", "i, [64, 24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list", "[-1, 0] shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32,", "i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device):", "= [[[np.float16, i, [32, 3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]]", "= [-1, 0, 3] shape_format = [[[np.float32, i, [16, 640, 640]], [np.float32, i,", "[np.int32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list =", "= [[[np.float32, i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device):", "3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0]", "device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]]", "i, [64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list =", "test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16]], [np.float16, i,", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format", "in compliance with the License. # You may obtain a copy of the", "def test_not_equal_scalar_shape_format_fp32_1d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [18]]]", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float16,", "numpy as np import copy from common_utils import TestCase, run_tests from common_device_type import", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i,", "for item in shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0,", "npu_output_out = self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2)", "in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.int32,", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_4d(self, device): format_list = [-1, 0, 3] shape_format", "out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format): for item in shape_format:", "test_not_equal_shape_format_fp32_4d(self, device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [32, 3,", "See the License for the specific language governing permissions and # limitations under", "format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "= input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output", "npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32)", "scalar, npu_input3) 
cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output,", "7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0]", "from common_utils import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test import", "input2): output = torch.ne(input1, input2) output = output.to(\"cpu\") output = output.numpy().astype(np.int32) return output", "cpu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.numpy().astype(np.int32) return output def npu_op_inplace_exec(self, input1, input2):", "[64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_3d(self, device): format_list = [-1,", "1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0]", "= [[[np.float32, i, [64, 24, 38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self,", "create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if", "0] shape_format = [[[np.float16, i, [448, 1]], [np.float16, i, [448, 1]]] for i", "torch import numpy as np import copy from common_utils import TestCase, run_tests from", "i, [16, 640, 640]], [np.float32, i, [16, 640, 640]]] for i in format_list]", "= self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output,", "under the BSD 3-Clause License (the \"License\"); # you may not use this", "= [[[np.float32, 
i, [448, 1]], [np.float32, i, [448, 1]]] for i in format_list]", "def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [32, 3,", "scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool)", "= [[[np.float32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def", "i, [448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_3d(self, device): format_list =", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format", "[16]], [np.int32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device): format_list", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format", "except in compliance with the License. 
# You may obtain a copy of", "i, [16]], [np.float16, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device):", "38]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_4d(self, device): format_list = [-1, 0]", "self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out) self.assertRtolEqual(cpu_output_inp, npu_output_inp) def", "import torch import numpy as np import copy from common_utils import TestCase, run_tests", "i, [18]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp16_2d(self, device): format_list = [-1,", "0] shape_format = [[[np.int32, i, [32, 3, 3, 3]], [np.int32, i, [32, 3,", "100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 =", "util_test import create_common_tensor class TestNotEqual(TestCase): def cpu_op_exec(self, input1, input2): output = torch.ne(input1, input2)", "def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [16]], [np.float32,", "the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law", "output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2, out=out) output = out.to(\"cpu\") output", "device): format_list = [-1, 0, 3] shape_format = [[[np.float32, i, [18]]] for i", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "640, 640]], [np.float16, i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format) def", "torch.ne(input1, input2, out=out) output = out.to(\"cpu\") output = output.numpy().astype(np.int32) return output def not_equal_scalar_result(self,", "3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device):", "0, 100) npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2", "= [-1, 0] shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for i", "input1.to(\"cpu\") output = output.numpy().astype(np.int32) return output def npu_op_exec_out(self, input1, input2, out): torch.ne(input1, input2,", "3, 3]]] for i in format_list] self.not_equal_result(shape_format) instantiate_device_type_tests(TestNotEqual, globals(), except_for=\"cpu\") if __name__ ==", "test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i, [448, 1]], [np.float16,", "i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_2d(self, device): format_list = [-1,", "[32, 3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_shape_format_int32_1d(self, device): format_list", "npu_input3 = copy.deepcopy(cpu_input1).to(\"npu\").to(torch.bool) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32)", "640, 640]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_4d(self, device): format_list = [-1,", "[-1, 0] shape_format = [[[np.float16, i, [16]], [np.float16, i, [16]]] for i in", "[16, 640, 640]], [np.int32, 
i, [16, 640, 640]]] for i in format_list] self.not_equal_result(shape_format)", "= torch.ne(input1, input2) output = output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2): output", "not_equal_result(self, shape_format): for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2,", "# Copyright (c) 2020, Huawei Technologies.All rights reserved. # # Licensed under the", "def test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [448, 1]],", "[448, 1]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1,", "[64, 7]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1,", "shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100) cpu_input2, npu_input2 = create_common_tensor(item[1], 0, 100)", "i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format =", "item in shape_format: scalar = np.random.uniform(0, 100) cpu_input1, npu_input1 = create_common_tensor(item[0], 0, 100)", "3, 3, 3]]] for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list =", "0] shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16, i, [16, 640, 640]]]", "test_not_equal_shape_format_int32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [448, 1]], [np.int32,", "self.assertRtolEqual(cpu_output_inp, npu_output_inp) def not_equal_result(self, shape_format): for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0],", "[np.float16, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_4d(self,", 
"device): format_list = [-1, 0] shape_format = [[[np.float16, i, [16, 640, 640]], [np.float16,", "format_list = [-1, 0] shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32, i,", "License (the \"License\"); # you may not use this file except in compliance", "output def npu_op_inplace_exec(self, input1, input2): input1.ne_(input2) output = input1.to(\"cpu\") output = output.numpy().astype(np.int32) return", "npu_output_out = self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar)", "[16]], [np.float16, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp16_3d(self, device): format_list = [-1, 0] shape_format =", "self.assertRtolEqual(cpu_output_inp, npu_output_inp) def test_not_equal_shape_format_fp16_1d(self, device): format_list = [-1, 0] shape_format = [[[np.float16, i,", "i, [32, 3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]] for i", "3, 3]]] for i in format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list", "= self.npu_op_exec_out(npu_input1, scalar, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, scalar) npu_output_inp = self.npu_op_inplace_exec(npu_input1, scalar) self.assertRtolEqual(cpu_output,", "for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format", "for i in format_list] self.not_equal_scalar_result(shape_format) def test_not_equal_scalar_shape_format_fp32_4d(self, device): format_list = [-1, 0] shape_format", "[448, 1]]] for i in format_list] self.not_equal_result(shape_format) def 
test_not_equal_shape_format_fp32_2d(self, device): format_list = [-1,", "test_not_equal_shape_format_int32_3d(self, device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16, 640, 640]],", "the specific language governing permissions and # limitations under the License. import torch", "def test_not_equal_scalar_shape_format_fp32_2d(self, device): format_list = [-1, 0] shape_format = [[[np.float32, i, [64, 7]]]", "= output.numpy().astype(np.int32) return output def npu_op_exec(self, input1, input2): output = torch.ne(input1, input2) output", "npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output) self.assertRtolEqual(cpu_output, npu_output_out)", "device): format_list = [-1, 0] shape_format = [[[np.int32, i, [16, 640, 640]], [np.int32,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_fp32_1d(self, device): format_list = [-1, 0] shape_format =", "i, [16]], [np.int32, i, [16]]] for i in format_list] self.not_equal_result(shape_format) def test_not_equal_shape_format_int32_2d(self, device):", "shape_format = [[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32, 3, 3,", "if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_input2 = cpu_input2.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1,", "format_list] self.not_equal_result(shape_format) # scala----------------------------------------------------------------- def test_not_equal_scalar_shape_format_fp16_1d(self, device): format_list = [-1, 0, 3] shape_format", "3]], [np.float32, i, [32, 3, 3, 3]]] for i in format_list] self.not_equal_result(shape_format) #", "self.npu_op_exec_out(npu_input1, npu_input2, npu_input3) cpu_output_inp = self.cpu_op_inplace_exec(cpu_input1, cpu_input2) npu_output_inp = 
self.npu_op_inplace_exec(npu_input1, npu_input2) self.assertRtolEqual(cpu_output, npu_output)", "= output.numpy().astype(np.int32) return output def not_equal_scalar_result(self, shape_format): for item in shape_format: scalar =", "limitations under the License. import torch import numpy as np import copy from", "[[[np.float32, i, [32, 3, 3, 3]], [np.float32, i, [32, 3, 3, 3]]] for", "3, 3, 3]], [np.float16, i, [32, 3, 3, 3]]] for i in format_list]" ]
[ "} } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx, body=query)", "ip)) or (ip not in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def get_ip_site(idx,", ": { \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" } }", "or (ip not in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def get_ip_site(idx, dateFrom,", "in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip =", "not None)) or (host not in res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'],", ": dateTo } } }, \"_source\" : False, \"aggregations\" : { \"groupby\" :", "\"timestamp\" : { \"from\" : dateFrom, \"to\" : dateTo } } }, {", "} } }, { str(fld+\"_host\") : { \"terms\" : { \"field\" : str(fld+\"_host\"),", ": 9999, \"sources\" : [ { \"src\" : { \"terms\" : { \"field\"", "{ \"dest_site\" : { \"terms\" : { \"field\" : \"dest_site\" } } }", "\"missing_bucket\" : True, \"order\" : \"asc\" } } } ] } } }", "{ \"must\" : [ { \"range\" : { \"timestamp\" : { \"from\" :", "['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field]", "= item['key'][field] site = item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys()) and", ": { \"terms\" : { \"field\" : \"dest_host\" } } }, { \"src_site\"", "in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and", "} } }, { \"src_host\" : { \"terms\" : { \"field\" : \"src_host\"", "{ \"from\" : dateFrom, \"to\" : dateTo } } }, \"_source\" : False,", "\"host\" : { \"terms\" : { \"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\"", "results = 
hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host =", "= hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host']", "\"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def", "'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query", "in res_host_site.keys()): res_host_site[host] = site return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return", "dateFrom, dateTo): def q_ip_site (fld): return { \"size\" : 0, \"query\" : {", "return aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return { \"size\" :", "= valueField[idx] query = { \"size\" : 0, \"query\" : { \"bool\" :", "} ] } } } } res_ip_site = {} for field in ['src',", ": { \"field\" : \"src\" } } }, { \"dest\" : { \"terms\"", "{ \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\"", "None)) or (host not in res_host_site.keys()): res_host_site[host] = site return res_host_site def get_metadata(dateFrom,", "item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site'] is not None)) or (host not", "body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host", "\"size\" : 0, \"query\" : { \"bool\" : { \"must\" : [ {", ": { \"terms\" : { \"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\" :", "and (site is not None)) or (host not in res_host_site.keys()): res_host_site[host] = site", "ip = item['key'][field] host = item['key'][str(field+'_host')] if ((ip 
in res_ip_host.keys()) and (host is", "{ \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host':", ": { \"terms\" : { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" :", "body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'],", "str(fld+\"_host\") : { \"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\"", "dateFrom, dateTo): val_fld = valueField[idx] query = { \"size\" : 0, \"query\" :", "{ \"field\" : \"administrator.name\", \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "\"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\" : { \"composite\"", "} } }, { \"admin_email\" : { \"terms\" : { \"field\" : \"administrator.email\",", "{ \"size\" : 9999, \"sources\" : [ { fld : { \"terms\" :", "query = { \"size\" : 0, \"query\" : { \"bool\" : { \"must\"", ": { \"bool\" : { \"must\" : [ { \"range\" : { \"timestamp\"", "aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site':", "= item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site is not None)) or (host", "\"order\" : \"asc\" } } }, { \"admin_name\" : { \"terms\" : {", "print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx, body=query) for item in", "\"administrator.name\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { 
\"ipv6\" :", "\"_source\" : False, \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\"", "get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\" : 0, \"query\" : { \"range\"", "aggdata = hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src':", "\"size\" : 9999, \"sources\" : [ { str(fld+\"_site\") : { \"terms\" : {", "\"asc\" } } }, { \"ipv6\" : { \"terms\" : { \"field\" :", "in res_meta.keys()) and (item['key']['site'] is not None)) or (host not in res_meta.keys()): res_meta[host]", "} } } } res_ip_site = {} for field in ['src', 'dest']: results", "} } } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx,", ": False, \"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\" : { \"composite\" :", "{ \"terms\" : { \"field\" : \"dest\" } } }, { \"src_host\" :", "}, { \"dest_host\" : { \"terms\" : { \"field\" : \"dest_host\" } }", ": fld, \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_host\")", "str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } } ] } }", ": { \"composite\" : { \"size\" : 9999, \"sources\" : [ { str(fld+\"_site\")", "{ \"dest\" : { \"terms\" : { \"field\" : \"dest\" } } },", "] } }, \"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\"", "\"field\" : \"src_host\" } } }, { \"dest_host\" : { \"terms\" : {", "True } }, { \"term\" : { \"dest_production\" : True } } ]", "{ \"dest_production\" : True } } ] } }, \"_source\" : False, \"stored_fields\"", "hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')] if", "in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = item['key'][str(field+'_site')] ipv6 = 
item['key']['ipv6'] if ((ip", "[ { \"site\" : { \"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" :", "in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site", ": 0, \"query\" : { \"bool\" : { \"must\" : [ { \"range\"", "dateFrom, \"lte\": dateTo } } }, { \"term\" : { \"src_production\" : True", "\"field\": val_fld } } } } } } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs", "} }, { \"ipv6\" : { \"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\"", "{ \"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "True } } ] } }, \"aggregations\" : { \"groupby\" : { \"composite\"", "{ \"avg\": { \"field\": val_fld } } } } } } # print(idx,", "} ] } } } } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {}", "valueField[idx] query = { \"size\" : 0, \"query\" : { \"bool\" : {", "or (host not in res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'],", "{ \"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\"", "\"term\" : { \"dest_production\" : True } } ] } }, \"aggregations\" :", "\"asc\" } } }, { str(fld+\"_site\") : { \"terms\" : { \"field\" :", "\"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"host\" :", "res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in", "\"dest_production\" : True } } ] } }, \"_source\" : False, \"stored_fields\" :", "\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"host\" : {", "True, \"order\" : \"asc\" } } } ] } } } } res_ip_site", "{ \"timestamp\" : { \"from\" : dateFrom, \"to\" : dateTo } } },", "} } } ] } } } } res_ip_host = {} for field", ": str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : 
\"asc\" } } }, { \"ipv6\"", "str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\"", "\"asc\" } } } ] } } } } res_host_site = {} for", "True, \"order\" : \"asc\" } } } ] } } } } res_ip_host", "} } ] } } } } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta =", "= item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site'] is not None)) or (host", ": False, \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" :", "} } ] } } } } res_ip_site = {} for field in", "9999, \"sources\" : [ { \"site\" : { \"terms\" : { \"field\" :", "dateTo): val_fld = valueField[idx] query = { \"size\" : 0, \"query\" : {", "dateTo): def q_metadata(): return { \"size\" : 0, \"query\" : { \"range\" :", "\"timestamp\" : { \"from\" : dateFrom, \"to\" : dateTo } } }, \"_source\"", "\"dest_host\" : { \"terms\" : { \"field\" : \"dest_host\" } } }, {", "\"asc\" } } } ] } } } } res_ip_host = {} for", "item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom,", "'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] })", "} } }, { \"dest\" : { \"terms\" : { \"field\" : \"dest\"", "[ { fld : { \"terms\" : { \"field\" : fld, \"missing_bucket\" :", ": { \"field\" : \"dest_site\" } } } ] }, \"aggs\": { val_fld:", "\"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\" } } } ] } }", "(site is not None)) or (ip not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6]", "\"query\" : { \"bool\" : { \"must\" : [ { \"range\" : {", "hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = item['key'][str(field+'_site')] ipv6", "True, \"order\" : \"asc\" } 
} }, { \"admin_email\" : { \"terms\" :", "host = item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and (host is not None) and", "in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and", "q_metadata(): return { \"size\" : 0, \"query\" : { \"range\" : { \"timestamp\"", "[] aggdata = hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo,", ": { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", ": { \"field\" : \"src_host\" } } }, { \"dest_host\" : { \"terms\"", "\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"admin_name\" : {", "{ \"field\" : \"dest_host\" } } }, { \"src_site\" : { \"terms\" :", "'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value':", "str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_host\") :", "{ \"src_production\" : True } }, { \"term\" : { \"dest_production\" : True", "= {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for item", "'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query = { \"size\"", "True, \"order\" : \"asc\" } } } ] } } } } results", "'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query =", "item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count']", "9999, \"sources\" : [ { \"src\" : { \"terms\" : { \"field\" :", 
"= item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site is not None)) or (ip", "= item['key'][field] host = item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and (host is not", ": { \"gt\" : dateFrom, \"lte\": dateTo } } }, { \"term\" :", "= [site, ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return", "'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs", "= { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx,", "def q_host_site (fld): return { \"size\" : 0, \"query\" : { \"bool\" :", "{ \"size\" : 9999, \"sources\" : [ { str(fld+\"_site\") : { \"terms\" :", ": 0, \"query\" : { \"range\" : { \"timestamp\" : { \"from\" :", "}, { \"admin_email\" : { \"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\" :", "def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query = { \"size\" : 0,", "\"terms\" : { \"field\" : \"dest_host\" } } }, { \"src_site\" : {", "} } } } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item", "{ \"composite\" : { \"size\" : 9999, \"sources\" : [ { str(fld+\"_site\") :", ": { \"field\" : \"dest\" } } }, { \"src_host\" : { \"terms\"", "res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'], 'ipv6': item['key']['ipv6'], 'ipv4': item['key']['ipv4']} return", "\"dest\" : { \"terms\" : { \"field\" : \"dest\" } } }, {", ": \"asc\" } } } ] } } } } res_host_site = {}", "} } } } } } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = []", "} } res_ip_host = {} for field in ['src', 'dest']: results = hp.es.search(index=idx,", "item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return 
{", "= host return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return {", "\"order\" : \"asc\" } } } ] } } } } res_host_site =", "\"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } } ] } }", "] } } } } res_ip_site = {} for field in ['src', 'dest']:", "elasticsearch.helpers import scan import utils.helpers as hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd':", ": { \"timestamp\" : { \"from\" : dateFrom, \"to\" : dateTo } }", "(site is not None)) or (host not in res_host_site.keys()): res_host_site[host] = site return", "\"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "aggrs = [] aggdata = hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']),", "\"size\" : 9999, \"sources\" : [ { fld : { \"terms\" : {", "from elasticsearch.helpers import scan import utils.helpers as hp valueField = { 'ps_packetloss': 'packet_loss',", ": [ { \"src\" : { \"terms\" : { \"field\" : \"src\" }", "{ \"src\" : { \"terms\" : { \"field\" : \"src\" } } },", "import scan import utils.helpers as hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean',", ": { \"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\" :", "True } } ] } }, \"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\"", "\"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" }", ": \"_none_\", \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" :", ": { \"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\" : True, \"order\" :", ": { \"timestamp\" : { \"gt\" : dateFrom, \"lte\": dateTo } } },", "res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return { \"size\" : 0,", "\"range\" : { \"timestamp\" : { \"gt\" : dateFrom, \"lte\": dateTo } }", "res_ip_host = {} for 
field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for", ": \"dest_site\" } } } ] }, \"aggs\": { val_fld: { \"avg\": {", "{ \"from\" : dateFrom, \"to\" : dateTo } } }, { \"term\" :", "\"field\" : fld, \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "dateFrom, dateTo): def q_host_site (fld): return { \"size\" : 0, \"query\" : {", "in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host':", "} } }, { \"term\" : { \"src_production\" : True } }, {", "\"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\" }", ": True, \"order\" : \"asc\" } } }, { \"admin_email\" : { \"terms\"", "{ str(fld+\"_host\") : { \"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True,", "body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = item['key'][str(field+'_site')] ipv6 =", ": \"asc\" } } }, { \"admin_name\" : { \"terms\" : { \"field\"", "dateTo } } }, { \"term\" : { \"src_production\" : True } },", "and (host != ip)) or (ip not in res_ip_host.keys()): res_ip_host[ip] = host return", "!= ip)) or (ip not in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def", "{ 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom,", "and (site is not None)) or (ip not in res_ip_site.keys()): res_ip_site[ip] = [site,", "}, { \"admin_name\" : { \"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\" :", "hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' }", "{ \"terms\" : { \"field\" : \"src\" } } }, { \"dest\" :", "{ \"must\" : [ { 
\"range\" : { \"timestamp\" : { \"gt\" :", "ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site is not None)) or", "((ip in res_ip_site.keys()) and (site is not None)) or (ip not in res_ip_site.keys()):", "[ { \"range\" : { \"timestamp\" : { \"from\" : dateFrom, \"to\" :", "host = item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site is not None)) or", "\"to\" : dateTo } } }, { \"term\" : { \"src_production\" : True", ": [ { fld : { \"terms\" : { \"field\" : fld, \"missing_bucket\"", "= site return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\" :", "'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host", "} ] } }, \"aggregations\" : { \"groupby\" : { \"composite\" : {", "'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'],", "item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in res_host_site.keys())", "\"query\" : { \"range\" : { \"timestamp\" : { \"from\" : dateFrom, \"to\"", ": [ { \"site\" : { \"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\"", "return { \"size\" : 0, \"query\" : { \"range\" : { \"timestamp\" :", ": 9999, \"sources\" : [ { str(fld+\"_site\") : { \"terms\" : { \"field\"", "{ \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } }", ": \"asc\" } } }, { str(fld+\"_site\") : { \"terms\" : { \"field\"", "= hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = item['key'][str(field+'_site')]", "'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 
'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx]", "} } ] } } } } res_host_site = {} for field in", "} } }, { \"ipv6\" : { \"terms\" : { \"field\" : \"ipv6\",", "} } }, { \"dest_site\" : { \"terms\" : { \"field\" : \"dest_site\"", "item['key'][field] host = item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and (host is not None)", "\"sources\" : [ { str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"),", ": fld, \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_site\")", "{ \"size\" : 0, \"query\" : { \"bool\" : { \"must\" : [", "\"dest\" } } }, { \"src_host\" : { \"terms\" : { \"field\" :", "results = hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host =", "} }, \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" :", "] } } } } res_host_site = {} for field in ['src', 'dest']:", "} }, { \"dest_host\" : { \"terms\" : { \"field\" : \"dest_host\" }", "} } }, \"_source\" : False, \"aggregations\" : { \"groupby\" : { \"composite\"", ": { \"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", "\"term\" : { \"src_production\" : True } }, { \"term\" : { \"dest_production\"", "} } }, { \"admin_name\" : { \"terms\" : { \"field\" : \"administrator.name\",", "\"order\" : \"asc\" } } } ] } } } } res_ip_host =", "item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld):", ": \"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\" } } } ] }", "{ \"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\"", "def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return { \"size\" : 0, \"query\"", "} } }, { str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"),", "{ \"field\" : \"src_site\" } } }, { \"dest_site\" : { \"terms\" :", 
"aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'],", "dateTo } } }, \"_source\" : False, \"aggregations\" : { \"groupby\" : {", "} res_ip_host = {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field))", ": { \"composite\" : { \"size\" : 9999, \"sources\" : [ { \"src\"", "item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def", ": { \"field\" : fld, \"missing_bucket\" : True, \"order\" : \"asc\" } }", ": [ { \"range\" : { \"timestamp\" : { \"from\" : dateFrom, \"to\"", "True, \"order\" : \"asc\" } } } ] } } } } res_host_site", ": True, \"order\" : \"asc\" } } } ] } } } }", "\"terms\" : { \"field\" : \"src\" } } }, { \"dest\" : {", "}, { \"term\" : { \"dest_production\" : True } } ] } },", ": { \"terms\" : { \"field\" : \"dest\" } } }, { \"src_host\"", "\"gt\" : dateFrom, \"lte\": dateTo } } }, { \"term\" : { \"src_production\"", "fld, \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_host\") :", ": \"asc\" } } }, { \"ipv6\" : { \"terms\" : { \"field\"", "{ \"field\" : \"src\" } } }, { \"dest\" : { \"terms\" :", "} } }, { \"src_site\" : { \"terms\" : { \"field\" : \"src_site\"", ": \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"ipv4\"", "not None) and (host != ip)) or (ip not in res_ip_host.keys()): res_ip_host[ip] =", "} } } res_ip_site = {} for field in ['src', 'dest']: results =", "[ { \"src\" : { \"terms\" : { \"field\" : \"src\" } }", "{ \"timestamp\" : { \"gt\" : dateFrom, \"lte\": dateTo } } }, {", "\"composite\" : { \"size\" : 9999, \"sources\" : [ { fld : {", "for field in ['src', 'dest']: results = 
hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]:", "{ \"size\" : 0, \"query\" : { \"range\" : { \"timestamp\" : {", ": \"asc\" } } } ] } } } } results = hp.es.search(index='ps_meta',", "}) return aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return { \"size\"", "((ip in res_ip_host.keys()) and (host is not None) and (host != ip)) or", ": { \"range\" : { \"timestamp\" : { \"from\" : dateFrom, \"to\" :", "{ \"ipv6\" : { \"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True,", "}, { \"host\" : { \"terms\" : { \"field\" : \"host.keyword\", \"missing_bucket\" :", "} ] } }, \"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\" : {", "{} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for item in", "dateFrom, dateTo): def q_ip_host (fld): return { \"size\" : 0, \"query\" : {", "(host != ip)) or (ip not in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host", ": True, \"order\" : \"asc\" } } }, { \"ipv4\" : { \"terms\"", "utils.helpers as hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput':", "\"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\" }", "for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site']", "host return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return { \"size\"", "{ \"range\" : { \"timestamp\" : { \"from\" : dateFrom, \"to\" : dateTo", "= {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for item", "} }, { str(fld+\"_host\") : { \"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\"", "results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = 
item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in", "\"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" : 9999, \"sources\"", "9999, \"sources\" : [ { fld : { \"terms\" : { \"field\" :", "\"size\" : 9999, \"sources\" : [ { \"site\" : { \"terms\" : {", "\"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld", "site return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\" : 0,", "dateTo): def q_host_site (fld): return { \"size\" : 0, \"query\" : { \"bool\"", ": dateFrom, \"to\" : dateTo } } }, \"_source\" : False, \"aggregations\" :", "return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\" : 0, \"query\"", "True, \"order\" : \"asc\" } } }, { str(fld+\"_host\") : { \"terms\" :", "\"sources\" : [ { \"src\" : { \"terms\" : { \"field\" : \"src\"", "{ \"terms\" : { \"field\" : fld, \"missing_bucket\" : True, \"order\" : \"asc\"", "\"src_site\" } } }, { \"dest_site\" : { \"terms\" : { \"field\" :", "\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"ipv4\" : {", "\"bool\" : { \"must\" : [ { \"range\" : { \"timestamp\" : {", ": { \"src_production\" : True } }, { \"term\" : { \"dest_production\" :", "res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\" : 0, \"query\" :", "\"src_host\" } } }, { \"dest_host\" : { \"terms\" : { \"field\" :", "} } ] }, \"aggs\": { val_fld: { \"avg\": { \"field\": val_fld }", "} } } res_ip_host = {} for field in ['src', 'dest']: results =", "{ \"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }", "for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = item['key'][str(field+'_site')] ipv6 = item['key']['ipv6']", "{ \"terms\" : { 
\"field\" : \"dest_host\" } } }, { \"src_site\" :", "} }, { \"ipv6\" : { \"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\"", "\"asc\" } } } ] } } } } res_ip_site = {} for", "}, { \"term\" : { \"src_production\" : True } }, { \"term\" :", "res_ip_site[ip] = [site, ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld):", "{ \"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\"", "aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return { \"size\" : 0,", "\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"ipv6\" : {", "\"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\" } } } ]", "}, { str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" :", "\"asc\" } } }, { \"admin_email\" : { \"terms\" : { \"field\" :", ": True, \"order\" : \"asc\" } } }, { \"ipv6\" : { \"terms\"", "= hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')]", "\"dest_site\" } } } ] }, \"aggs\": { val_fld: { \"avg\": { \"field\":", ": { \"size\" : 9999, \"sources\" : [ { \"src\" : { \"terms\"", "item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom, dateTo): def", "def q_metadata(): return { \"size\" : 0, \"query\" : { \"range\" : {", "{ \"terms\" : { \"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\"", "[ { str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" :", "res_meta.keys()) and (item['key']['site'] is not None)) or (host not in res_meta.keys()): res_meta[host] =", "\"must\" : [ { \"range\" : { \"timestamp\" : { \"gt\" : dateFrom,", "\"src\" } } }, { \"dest\" : { \"terms\" : { \"field\" :", "is not None)) or (host not in res_meta.keys()): res_meta[host] = 
{'site': item['key']['site'], 'admin_name':", "dateFrom, \"to\" : dateTo } } }, { \"term\" : { \"src_production\" :", "= item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and (host is not None) and (host", ": \"asc\" } } }, { \"host\" : { \"terms\" : { \"field\"", "\"must\" : [ { \"range\" : { \"timestamp\" : { \"from\" : dateFrom,", "res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site", ": \"asc\" } } }, { \"ipv4\" : { \"terms\" : { \"field\"", "{} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item in", "results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host =", "'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count':", "item['key'][field] site = item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site", ": \"dest\" } } }, { \"src_host\" : { \"terms\" : { \"field\"", "} ] }, \"aggs\": { val_fld: { \"avg\": { \"field\": val_fld } }", "] } } } } res_ip_host = {} for field in ['src', 'dest']:", ": \"administrator.name\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"ipv6\"", "} def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query = { \"size\" :", ": \"asc\" } } } ] } } } } res_ip_site = {}", ": { \"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", "\"order\" : \"asc\" } } }, { \"ipv4\" : { \"terms\" : {", "'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host", "if ((ip in res_ip_site.keys()) and (site is not None)) or (ip not in", ": { \"must\" : [ { \"range\" : { \"timestamp\" : { \"from\"", 
"query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query = { \"size\" : 0, \"query\"", "item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site'] is", "res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return { \"size\" : 0,", "} ] } } } } res_host_site = {} for field in ['src',", "\"src_site\" : { \"terms\" : { \"field\" : \"src_site\" } } }, {", "} }, { \"dest_site\" : { \"terms\" : { \"field\" : \"dest_site\" }", ": { \"size\" : 9999, \"sources\" : [ { str(fld+\"_site\") : { \"terms\"", ": dateTo } } }, { \"term\" : { \"src_production\" : True }", "\"ipv6\" : { \"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\"", ": { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", "{ \"site\" : { \"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True,", "'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site':", "9999, \"sources\" : [ { str(fld+\"_site\") : { \"terms\" : { \"field\" :", "] } } } } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for", "True, \"order\" : \"asc\" } } }, { \"ipv6\" : { \"terms\" :", "False, \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" : 9999,", "{ \"size\" : 9999, \"sources\" : [ { \"src\" : { \"terms\" :", "\"dest_production\" : True } } ] } }, \"aggregations\" : { \"groupby\" :", "return res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return { \"size\" :", "= {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in res_meta.keys())", "fld : { \"terms\" : { \"field\" : fld, \"missing_bucket\" : True, \"order\"", "'ps_packetloss': 'packet_loss', 'ps_owd': 
'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom, dateTo):", "} } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]:", "if ((host in res_meta.keys()) and (item['key']['site'] is not None)) or (host not in", "True, \"order\" : \"asc\" } } }, { str(fld+\"_site\") : { \"terms\" :", ": { \"field\" : \"administrator.name\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", "True, \"order\" : \"asc\" } } }, { \"ipv4\" : { \"terms\" :", "} } }, { \"ipv6\" : { \"terms\" : { \"field\" : \"external_address.ipv6_address\",", "{ \"terms\" : { \"field\" : \"src_host\" } } }, { \"dest_host\" :", "not in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def", "} res_host_site = {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field))", ": [ { str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\"", "in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site'] is not", "def q_ip_site (fld): return { \"size\" : 0, \"query\" : { \"bool\" :", "}, { \"dest_site\" : { \"terms\" : { \"field\" : \"dest_site\" } }", "\"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\"", ": { \"size\" : 9999, \"sources\" : [ { \"site\" : { \"terms\"", "\"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" }", "} }, { \"src_host\" : { \"terms\" : { \"field\" : \"src_host\" }", "not in res_host_site.keys()): res_host_site[host] = site return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata():", "\"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "} } } res_host_site = {} for field in ['src', 'dest']: results =", 
"str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']:", "for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]:", "\"size\" : 9999, \"sources\" : [ { \"src\" : { \"terms\" : {", ": { \"from\" : dateFrom, \"to\" : dateTo } } }, \"_source\" :", "\"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\" : True, \"order\" : \"asc\" }", "}, { \"dest\" : { \"terms\" : { \"field\" : \"dest\" } }", "q_ip_site (fld): return { \"size\" : 0, \"query\" : { \"bool\" : {", "\"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\" }", "{ \"field\": val_fld } } } } } } # print(idx, str(query).replace(\"\\'\", \"\\\"\"))", "val_fld: { \"avg\": { \"field\": val_fld } } } } } } #", "in ['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site =", "\"site\" : { \"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\"", "{ \"field\" : fld, \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "is not None)) or (ip not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return", "def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return { \"size\" : 0, \"query\"", "\"field\" : \"dest_site\" } } } ] }, \"aggs\": { val_fld: { \"avg\":", ": { \"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\" :", "{ \"groupby\" : { \"composite\" : { \"size\" : 9999, \"sources\" : [", "['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field]", "\"src_production\" : True } }, { \"term\" : { \"dest_production\" : True }", "results = hp.es.search(index=idx, body=q_ip_site(field)) for item 
in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site =", "{ \"admin_name\" : { \"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\" : True,", "\"terms\" : { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\" }", ": \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"host\"", "\"asc\" } } }, { \"host\" : { \"terms\" : { \"field\" :", "{ \"range\" : { \"timestamp\" : { \"gt\" : dateFrom, \"lte\": dateTo }", "= {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item", "\"to\" : dateTo } } }, \"_source\" : False, \"aggregations\" : { \"groupby\"", "\"term\" : { \"dest_production\" : True } } ] } }, \"_source\" :", "\"field\" : \"administrator.name\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "{ \"composite\" : { \"size\" : 9999, \"sources\" : [ { \"src\" :", "{ fld : { \"terms\" : { \"field\" : fld, \"missing_bucket\" : True,", "\"ipv6\" : { \"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\"", "def q_ip_host (fld): return { \"size\" : 0, \"query\" : { \"bool\" :", "} res_ip_site = {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field))", "\"terms\" : { \"field\" : fld, \"missing_bucket\" : True, \"order\" : \"asc\" }", "\"dest_site\" : { \"terms\" : { \"field\" : \"dest_site\" } } } ]", "import utils.helpers as hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits',", "} }, { \"term\" : { \"dest_production\" : True } } ] }", ": \"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } } ] }", "} } } } res_ip_host = {} for field in ['src', 'dest']: results", ": True, \"order\" : \"asc\" } } }, { \"host\" : { \"terms\"", ": dateFrom, \"lte\": dateTo } } }, { \"term\" : { \"src_production\" :", "\"sources\" : [ { fld : { \"terms\" : { \"field\" 
: fld,", "\"composite\" : { \"size\" : 9999, \"sources\" : [ { \"src\" : {", "q_host_site (fld): return { \"size\" : 0, \"query\" : { \"bool\" : {", "(host is not None) and (host != ip)) or (ip not in res_ip_host.keys()):", "for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]:", "str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'],", "\"composite\" : { \"size\" : 9999, \"sources\" : [ { str(fld+\"_site\") : {", "None)) or (host not in res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email':", "{ \"terms\" : { \"field\" : \"dest_site\" } } } ] }, \"aggs\":", "\"field\" : \"dest_host\" } } }, { \"src_site\" : { \"terms\" : {", ": { \"must\" : [ { \"range\" : { \"timestamp\" : { \"gt\"", "results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site'] is not None))", "{ \"host\" : { \"terms\" : { \"field\" : \"host.keyword\", \"missing_bucket\" : True,", "\"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } } ]", "item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site is not None)) or (host not", "} } } } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata =", "for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')] if ((ip in", "(ip not in res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def get_ip_site(idx, dateFrom, dateTo):", "} }, { \"ipv4\" : { \"terms\" : { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\"", ": { \"terms\" : { \"field\" : \"dest_site\" } } } ] },", 
"\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_site\") : {", "\"from\" : dateFrom, \"to\" : dateTo } } }, \"_source\" : False, \"aggregations\"", "= hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'],", "{ \"composite\" : { \"size\" : 9999, \"sources\" : [ { fld :", "} }, { str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\"", "item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')] if ((ip in res_ip_host.keys())", "item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site is not None)) or (ip not", ": True, \"order\" : \"asc\" } } }, { str(fld+\"_site\") : { \"terms\"", "in res_ip_host.keys()) and (host is not None) and (host != ip)) or (ip", "} }, { \"src_site\" : { \"terms\" : { \"field\" : \"src_site\" }", "item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'],", "] }, \"aggs\": { val_fld: { \"avg\": { \"field\": val_fld } } }", "res_ip_site.keys()) and (site is not None)) or (ip not in res_ip_site.keys()): res_ip_site[ip] =", ": True, \"order\" : \"asc\" } } }, { str(fld+\"_host\") : { \"terms\"", "or (ip not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site def get_host_site(idx,", "} } } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item in", ": \"dest_host\" } } }, { \"src_site\" : { \"terms\" : { \"field\"", "\"dest_host\" } } }, { \"src_site\" : { \"terms\" : { \"field\" :", "{ \"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\"", "{ \"terms\" : { \"field\" : 
\"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\"", "results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site", "} } } ] } } } } res_host_site = {} for field", ": { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" :", "\"asc\" } } } ] } } } } results = hp.es.search(index='ps_meta', body=q_metadata())", "\"lte\": dateTo } } }, { \"term\" : { \"src_production\" : True }", "{ \"bool\" : { \"must\" : [ { \"range\" : { \"timestamp\" :", "\"admin_email\" : { \"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\" : True, \"order\"", ": { \"terms\" : { \"field\" : \"src_host\" } } }, { \"dest_host\"", "scan import utils.helpers as hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits':", "\"timestamp\" : { \"gt\" : dateFrom, \"lte\": dateTo } } }, { \"term\"", "item['key']['src'], 'dest': item['key']['dest'], 'src_host': item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'],", "'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx,", "}, \"aggs\": { val_fld: { \"avg\": { \"field\": val_fld } } } }", "\"order\" : \"asc\" } } }, { str(fld+\"_host\") : { \"terms\" : {", ": True, \"order\" : \"asc\" } } }, { \"admin_name\" : { \"terms\"", "res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'], 'ipv6': item['key']['ipv6'], 'ipv4': item['key']['ipv4']}", ": { \"dest_production\" : True } } ] } }, \"aggregations\" : {", "\"terms\" : { \"field\" : \"src_host\" } } }, { \"dest_host\" : {", "'dest_site': 
item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom, dateTo):", "False, \"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\" : { \"composite\" : {", ": { \"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" :", "{ \"dest_production\" : True } } ] } }, \"aggregations\" : { \"groupby\"", "\"asc\" } } }, { \"ipv4\" : { \"terms\" : { \"field\" :", "in res_ip_site.keys()) and (site is not None)) or (ip not in res_ip_site.keys()): res_ip_site[ip]", "\"field\" : \"dest\" } } }, { \"src_host\" : { \"terms\" : {", "True, \"order\" : \"asc\" } } }, { \"admin_name\" : { \"terms\" :", "{ \"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }", "\"ipv4\" : { \"terms\" : { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\"", ": True } } ] } }, \"aggregations\" : { \"groupby\" : {", "{ \"dest_host\" : { \"terms\" : { \"field\" : \"dest_host\" } } },", ": dateFrom, \"to\" : dateTo } } }, { \"term\" : { \"src_production\"", "} } }, { \"ipv4\" : { \"terms\" : { \"field\" : \"external_address.ipv4_address\",", "\"avg\": { \"field\": val_fld } } } } } } # print(idx, str(query).replace(\"\\'\",", "hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom, 'to':dateTo, 'src': item['key']['src'], 'dest':", ": \"src_host\" } } }, { \"dest_host\" : { \"terms\" : { \"field\"", "\"from\" : dateFrom, \"to\" : dateTo } } }, { \"term\" : {", "} } }, { \"dest_host\" : { \"terms\" : { \"field\" : \"dest_host\"", "if ((ip in res_ip_host.keys()) and (host is not None) and (host != ip))", ": True } }, { \"term\" : { \"dest_production\" : True } }", "get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return { \"size\" : 0, \"query\" :", "{} for field in ['src', 'dest']: results = 
hp.es.search(index=idx, body=q_ip_site(field)) for item in", ": \"src_site\" } } }, { \"dest_site\" : { \"terms\" : { \"field\"", "0, \"query\" : { \"bool\" : { \"must\" : [ { \"range\" :", "= {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'], 'ipv6': item['key']['ipv6'], 'ipv4': item['key']['ipv4']} return res_meta", "item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site is not None))", "{ \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "{ str(fld+\"_site\") : { \"terms\" : { \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True,", ": { \"size\" : 9999, \"sources\" : [ { fld : { \"terms\"", "\"terms\" : { \"field\" : \"dest\" } } }, { \"src_host\" : {", "} results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host", "} } } ] } } } } res_ip_site = {} for field", ": { \"dest_production\" : True } } ] } }, \"_source\" : False,", "\"order\" : \"asc\" } } } ] } } } } res_ip_site =", "= item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site is not", "in res_host_site.keys()) and (site is not None)) or (host not in res_host_site.keys()): res_host_site[host]", "res_ip_host.keys()) and (host is not None) and (host != ip)) or (ip not", "} } }, { \"host\" : { \"terms\" : { \"field\" : \"host.keyword\",", ": str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } } ] }", "} }, \"_source\" : False, \"aggregations\" : { \"groupby\" : { \"composite\" :", "return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return { \"size\" :", "\"order\" : \"asc\" } } }, { \"ipv6\" : { \"terms\" : {", "\"size\" : 0, \"query\" : { \"range\" : { \"timestamp\" : { \"from\"", "= [] aggdata = hp.es.search(index=idx, body=query) for item in 
aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash': str(item['key']['src']+'-'+item['key']['dest']), 'from':dateFrom,", "\"asc\" } } }, { \"admin_name\" : { \"terms\" : { \"field\" :", "{ \"ipv6\" : { \"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\" : True,", "res_host_site.keys()) and (site is not None)) or (host not in res_host_site.keys()): res_host_site[host] =", "str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"ipv6\" :", "{} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host in res_meta.keys()) and", ": 9999, \"sources\" : [ { fld : { \"terms\" : { \"field\"", "and (item['key']['site'] is not None)) or (host not in res_meta.keys()): res_meta[host] = {'site':", "\"sources\" : [ { \"site\" : { \"terms\" : { \"field\" : \"config.site_name.keyword\",", "}, { \"ipv6\" : { \"terms\" : { \"field\" : \"external_address.ipv6_address\", \"missing_bucket\" :", "\"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, {", "(ip not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site def get_host_site(idx, dateFrom,", ": \"asc\" } } } ] } } } } res_ip_host = {}", "'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host", "{ \"field\" : \"dest_site\" } } } ] }, \"aggs\": { val_fld: {", ": { \"terms\" : { \"field\" : \"src_site\" } } }, { \"dest_site\"", "val_fld } } } } } } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs =", "}, \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" : 9999,", "{ \"field\" : str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "\"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"admin_email\" :", "def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return { \"size\" : 
0, \"query\"", "}, { \"ipv6\" : { \"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\" :", "{ \"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\"", "\"terms\" : { \"field\" : \"src_site\" } } }, { \"dest_site\" : {", "in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip =", ": { \"terms\" : { \"field\" : fld, \"missing_bucket\" : True, \"order\" :", "ip = item['key'][field] site = item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys())", "\"src\" : { \"terms\" : { \"field\" : \"src\" } } }, {", "and (host is not None) and (host != ip)) or (ip not in", "\"order\" : \"asc\" } } }, { \"host\" : { \"terms\" : {", "= hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')]", "}, { \"src_site\" : { \"terms\" : { \"field\" : \"src_site\" } }", "{ \"field\" : \"dest\" } } }, { \"src_host\" : { \"terms\" :", "} }, \"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\" :", ": { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", ": { \"field\" : \"dest_host\" } } }, { \"src_site\" : { \"terms\"", "\"aggs\": { val_fld: { \"avg\": { \"field\": val_fld } } } } }", "= item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site is not", "= { \"size\" : 0, \"query\" : { \"bool\" : { \"must\" :", "} } ] } } } } res_ip_host = {} for field in", "{ \"admin_email\" : { \"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\" : True,", "'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld =", "[ { \"range\" : { \"timestamp\" : { \"gt\" : dateFrom, \"lte\": dateTo", "is not None)) or (host not 
in res_host_site.keys()): res_host_site[host] = site return res_host_site", "None) and (host != ip)) or (ip not in res_ip_host.keys()): res_ip_host[ip] = host", "body=q_metadata()) res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if ((host", "\"field\" : \"src_site\" } } }, { \"dest_site\" : { \"terms\" : {", "\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_host\") : {", "\"order\" : \"asc\" } } } ] } } } } results =", ": { \"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" :", "] } }, \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\"", "item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and (host is not None) and (host !=", "} } } ] } } } } results = hp.es.search(index='ps_meta', body=q_metadata()) res_meta", "site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site is", "{ \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } },", "{ \"src_host\" : { \"terms\" : { \"field\" : \"src_host\" } } },", "0, \"query\" : { \"range\" : { \"timestamp\" : { \"from\" : dateFrom,", "} } ] } }, \"aggregations\" : { \"groupby\" : { \"composite\" :", "{ \"field\" : \"src_host\" } } }, { \"dest_host\" : { \"terms\" :", "\"order\" : \"asc\" } } }, { \"admin_email\" : { \"terms\" : {", "}, \"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\" : { \"groupby\" : {", "{ \"term\" : { \"src_production\" : True } }, { \"term\" : {", ": { \"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\" } }", "} ] } } } } res_ip_host = {} for field in ['src',", ": \"src\" } } }, { \"dest\" : { \"terms\" : { \"field\"", ": [ { \"range\" : { \"timestamp\" : { \"gt\" : dateFrom, \"lte\":", "res_host_site[host] = site return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\"", "} }, { 
\"admin_name\" : { \"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\"", "'ps_throughput': 'throughput' } def query4Avg(idx, dateFrom, dateTo): val_fld = valueField[idx] query = {", "} } res_host_site = {} for field in ['src', 'dest']: results = hp.es.search(index=idx,", ": { \"terms\" : { \"field\" : \"src\" } } }, { \"dest\"", "for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in", "} }, { \"term\" : { \"src_production\" : True } }, { \"term\"", "((host in res_host_site.keys()) and (site is not None)) or (host not in res_host_site.keys()):", "def get_metadata(dateFrom, dateTo): def q_metadata(): return { \"size\" : 0, \"query\" : {", "} } } } res_host_site = {} for field in ['src', 'dest']: results", ": \"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"admin_name\"", "((host in res_meta.keys()) and (item['key']['site'] is not None)) or (host not in res_meta.keys()):", "q_ip_host (fld): return { \"size\" : 0, \"query\" : { \"bool\" : {", "True, \"order\" : \"asc\" } } }, { \"host\" : { \"terms\" :", "field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip", "}, { \"src_host\" : { \"terms\" : { \"field\" : \"src_host\" } }", "\"groupby\" : { \"composite\" : { \"size\" : 9999, \"sources\" : [ {", ": { \"composite\" : { \"size\" : 9999, \"sources\" : [ { fld", ": { \"composite\" : { \"size\" : 9999, \"sources\" : [ { \"site\"", "\"terms\" : { \"field\" : \"ipv6\", \"missing_bucket\" : True, \"order\" : \"asc\" }", "['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")]", "(fld): return { \"size\" : 0, \"query\" : { \"bool\" : { \"must\"", "is not None) and (host != ip)) or (ip not in 
res_ip_host.keys()): res_ip_host[ip]", "'doc_count': item['doc_count'] }) return aggrs def get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return", "\"order\" : \"asc\" } } }, { str(fld+\"_site\") : { \"terms\" : {", "{ \"src_site\" : { \"terms\" : { \"field\" : \"src_site\" } } },", "\"asc\" } } }, { str(fld+\"_host\") : { \"terms\" : { \"field\" :", "\"field\" : str(fld+\"_host\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } } ]", "if ((host in res_host_site.keys()) and (site is not None)) or (host not in", "(host not in res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'], 'ipv6':", "fld, \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_site\") :", ": 9999, \"sources\" : [ { \"site\" : { \"terms\" : { \"field\"", "hp.es.search(index='ps_meta', body=q_metadata()) res_meta = {} for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: host = item['key']['host'] if", "res_host_site.keys()): res_host_site[host] = site return res_host_site def get_metadata(dateFrom, dateTo): def q_metadata(): return {", "{ \"terms\" : { \"field\" : \"src_site\" } } }, { \"dest_site\" :", "host = item['key']['host'] if ((host in res_meta.keys()) and (item['key']['site'] is not None)) or", "{ \"gt\" : dateFrom, \"lte\": dateTo } } }, { \"term\" : {", "'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site", ": { \"field\" : \"src_site\" } } }, { \"dest_site\" : { \"terms\"", "[site, ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return {", "} } } } } # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata", "val_fld = valueField[idx] query = { \"size\" : 0, \"query\" : { \"bool\"", "dateFrom, \"to\" : dateTo } } }, \"_source\" : False, \"aggregations\" : {", "} }, { \"host\" : { \"terms\" : { 
\"field\" : \"host.keyword\", \"missing_bucket\"", "field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site", "res_ip_host[ip] = host return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld): return", "res_ip_site = {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for", "# print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx, body=query) for item", "hp.es.search(index=idx, body=q_host_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: site = item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if", "item['key'][str(field+\"_site\")] host = item['key'][str(field+'_host')] if ((host in res_host_site.keys()) and (site is not None))", "\"external_address.ipv6_address\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"ipv4\" :", "as hp valueField = { 'ps_packetloss': 'packet_loss', 'ps_owd': 'delay_mean', 'ps_retransmits': 'retransmits', 'ps_throughput': 'throughput'", "None)) or (ip not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site def", ": { \"terms\" : { \"field\" : \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" :", "in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo): def", "\"terms\" : { \"field\" : \"dest_site\" } } } ] }, \"aggs\": {", "\"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx, body=query) for item in aggdata['aggregations']['groupby']['buckets']: aggrs.append({'hash':", "in res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'], 'ipv6': item['key']['ipv6'], 'ipv4':", "\"src_host\" : { \"terms\" : { \"field\" : \"src_host\" } } }, {", "} }, { \"dest\" : { \"terms\" : { \"field\" : \"dest\" }", 
"(host not in res_host_site.keys()): res_host_site[host] = site return res_host_site def get_metadata(dateFrom, dateTo): def", "res_ip_host.keys()): res_ip_host[ip] = host return res_ip_host def get_ip_site(idx, dateFrom, dateTo): def q_ip_site (fld):", ": True } } ] } }, \"_source\" : False, \"stored_fields\" : \"_none_\",", ": str(fld+\"_site\"), \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { str(fld+\"_host\")", "dateTo): def q_ip_host (fld): return { \"size\" : 0, \"query\" : { \"bool\"", "}, \"_source\" : False, \"aggregations\" : { \"groupby\" : { \"composite\" : {", "{ \"term\" : { \"dest_production\" : True } } ] } }, \"aggregations\"", "item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] site = item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if", "\"_none_\", \"aggregations\" : { \"groupby\" : { \"composite\" : { \"size\" : 9999,", "{ \"composite\" : { \"size\" : 9999, \"sources\" : [ { \"site\" :", "not None)) or (ip not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site", "res_host_site = {} for field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_host_site(field)) for", "\"admin_name\" : { \"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\" : True, \"order\"", "field in ['src', 'dest']: results = hp.es.search(index=idx, body=q_ip_site(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip", "body=q_ip_host(field)) for item in results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')] if ((ip", "dateTo): def q_ip_site (fld): return { \"size\" : 0, \"query\" : { \"bool\"", "{ \"term\" : { \"dest_production\" : True } } ] } }, \"_source\"", "\"field\" : \"src\" } } }, { \"dest\" : { \"terms\" : {", ": { \"groupby\" : { \"composite\" : { \"size\" : 9999, \"sources\" :", "get_ip_host(idx, dateFrom, dateTo): def q_ip_host (fld): return { \"size\" : 0, \"query\" :", "}, { 
\"ipv4\" : { \"terms\" : { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" :", "\"administrator.email\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"admin_name\" :", "}, { str(fld+\"_host\") : { \"terms\" : { \"field\" : str(fld+\"_host\"), \"missing_bucket\" :", "} } } ] }, \"aggs\": { val_fld: { \"avg\": { \"field\": val_fld", "return { \"size\" : 0, \"query\" : { \"bool\" : { \"must\" :", "} } res_ip_site = {} for field in ['src', 'dest']: results = hp.es.search(index=idx,", ": \"config.site_name.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"admin_email\"", ": \"asc\" } } }, { \"admin_email\" : { \"terms\" : { \"field\"", "} }, { \"admin_email\" : { \"terms\" : { \"field\" : \"administrator.email\", \"missing_bucket\"", "{ val_fld: { \"avg\": { \"field\": val_fld } } } } } }", "site = item['key'][str(field+'_site')] ipv6 = item['key']['ipv6'] if ((ip in res_ip_site.keys()) and (site is", "ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return { \"size\"", "(item['key']['site'] is not None)) or (host not in res_meta.keys()): res_meta[host] = {'site': item['key']['site'],", "{ \"size\" : 9999, \"sources\" : [ { \"site\" : { \"terms\" :", "item['key']['src_host'], 'dest_host': item['key']['dest_host'], 'src_site': item['key']['src_site'], 'dest_site': item['key']['dest_site'], 'value': item[val_fld]['value'], 'doc_count': item['doc_count'] }) return", "not None)) or (host not in res_host_site.keys()): res_host_site[host] = site return res_host_site def", ": \"asc\" } } }, { str(fld+\"_host\") : { \"terms\" : { \"field\"", "not in res_meta.keys()): res_meta[host] = {'site': item['key']['site'], 'admin_name': item['key']['admin_name'], 'admin_email': item['key']['admin_email'], 'ipv6': item['key']['ipv6'],", "\"missing_bucket\" : True, \"order\" : \"asc\" } } }, { \"admin_email\" : {", "{ \"terms\" : { \"field\" : \"administrator.name\", \"missing_bucket\" : True, 
\"order\" : \"asc\"", "\"terms\" : { \"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" }", ": { \"from\" : dateFrom, \"to\" : dateTo } } }, { \"term\"", "{ \"ipv4\" : { \"terms\" : { \"field\" : \"external_address.ipv4_address\", \"missing_bucket\" : True,", ": { \"field\" : \"host.keyword\", \"missing_bucket\" : True, \"order\" : \"asc\" } }", "} # print(idx, str(query).replace(\"\\'\", \"\\\"\")) aggrs = [] aggdata = hp.es.search(index=idx, body=query) for", "\"range\" : { \"timestamp\" : { \"from\" : dateFrom, \"to\" : dateTo }", "not in res_ip_site.keys()): res_ip_site[ip] = [site, ipv6] return res_ip_site def get_host_site(idx, dateFrom, dateTo):", "} } ] } }, \"_source\" : False, \"stored_fields\" : \"_none_\", \"aggregations\" :", "or (host not in res_host_site.keys()): res_host_site[host] = site return res_host_site def get_metadata(dateFrom, dateTo):", "\"composite\" : { \"size\" : 9999, \"sources\" : [ { \"site\" : {", "results[\"aggregations\"][\"groupby\"][\"buckets\"]: ip = item['key'][field] host = item['key'][str(field+'_host')] if ((ip in res_ip_host.keys()) and (host", "get_host_site(idx, dateFrom, dateTo): def q_host_site (fld): return { \"size\" : 0, \"query\" :" ]
[ "self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self): sunset = self.sun.get_sunset() assert", "# ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self):", "{ \"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\":", "PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\":", "= self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self,", "Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET", "\"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\",", "PM\" }, \"status\": \"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get", "longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def 
test__get_sunset__no_params__calou_and_today_called(self):", "Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self): sunset = self.sun.get_sunset() assert sunset ==", "pytest from mock import patch from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE =", "from mock import patch from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE = \"00\"", "\"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\",", "from __future__ import print_function from __future__ import unicode_literals import pytest from mock import", "= patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE)", "setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun =", "AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\":", "self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def", "FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51", "\"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": 
\"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" },", "\"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34", "from __future__ import absolute_import from __future__ import division from __future__ import print_function from", "\"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43", "patch from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\"", "\"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod", "__future__ import print_function from __future__ import unicode_literals import pytest from mock import patch", "__future__ import absolute_import from __future__ import division from __future__ import print_function from __future__", "\"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\",", "\"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59", "PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\":", "\"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get')", "from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE", "division from __future__ import print_function from __future__ import unicode_literals import pytest from mock", "= \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { 
\"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77", "FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = {", "PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\"", "= \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER", "\"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\",", "self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self): sunset = self.sun.get_sunset() assert sunset", "import patch from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE =", "self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE", "\"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\",", "unicode_literals import pytest from mock import patch from night_scheduler.framework.sun.sun import Sun class TestSun(object):", "import division from __future__ import print_function from __future__ import unicode_literals import pytest from", "def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get 
= self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun", "from __future__ import unicode_literals import pytest from mock import patch from night_scheduler.framework.sun.sun import", "\"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" }", "Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def", "test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self): sunset = self.sun.get_sunset()", "= \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\":", "\"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod def setup_method(self, method):", "\"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04", "\"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get =", "patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod", "\"civil_twilight_end\": \"7:47:34 PM\", 
\"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\":", "AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\",", "self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() #", "= Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ##############################################################################################", "AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\":", "date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset()", "print_function from __future__ import unicode_literals import pytest from mock import patch from night_scheduler.framework.sun.sun", "class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET =", "\"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER =", "@classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER", "from __future__ import division 
from __future__ import print_function from __future__ import unicode_literals import", "TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77", "= \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\": \"4:26:42", "TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self): sunset = self.sun.get_sunset() assert sunset == TestSun.FAKE_SUNSET", "= self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE ))", "\"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": {", "PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod def", "FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\":", "self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method):", "FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\"", "\"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\",", "import unicode_literals import pytest from mock import patch from night_scheduler.framework.sun.sun import Sun class", "\"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", 
\"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\": \"3:54:08", "self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def", "} @classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value =", "\"civil_twilight_begin\": \"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\":", "TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop()", "AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get", "teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE,", "def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def 
test__get_sunset__no_params__retuns_sunset_hour(self): sunset =", "\"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value", "__future__ import division from __future__ import print_function from __future__ import unicode_literals import pytest", "\"3:54:08 AM\", \"civil_twilight_end\": \"7:47:34 PM\", \"nautical_twilight_begin\": \"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39", "############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE, TestSun.FAKE_DATE )) def test__get_sunset__no_params__retuns_sunset_hour(self): sunset", "\"3:12:59 AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\":", "method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE,", "\"YYYY-MM-DD\" FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\": \"4:26:42 AM\",", "self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get", "import absolute_import from __future__ import division from __future__ import print_function from __future__ import", "absolute_import from __future__ import division from __future__ import 
print_function from __future__ import unicode_literals", "self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start() self.mock_requests_get.return_value = TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE,", "import Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE = \"YYYY-MM-DD\"", "AM\", \"nautical_twilight_end\": \"8:28:43 PM\", \"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\"", "= TestSun.FAKE_SUNRISE_SUNSERT_ORG_ANSWER self.sun = Sun(latitude=TestSun.FAKE_LATITUDE, longitude=TestSun.FAKE_LONGITUDE, date=TestSun.FAKE_DATE) @classmethod def teardown_method(self, method): self.mock_requests_get =", "FAKE_SUNSET = \"99:88:77 PM\" FAKE_SUNRISE_SUNSERT_ORG_ANSWER = { \"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\":", "__future__ import unicode_literals import pytest from mock import patch from night_scheduler.framework.sun.sun import Sun", "{ \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\", \"day_length\": \"14:48:18\", \"civil_twilight_begin\":", "}, \"status\": \"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get =", "\"astronomical_twilight_begin\": \"2:25:39 AM\", \"astronomical_twilight_end\": \"9:16:04 PM\" }, \"status\": \"OK\" } @classmethod def setup_method(self,", "night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE = \"11\" FAKE_DATE =", "= { \"results\": { \"sunrise\": \"4:26:42 AM\", \"sunset\": \"99:88:77 PM\", \"solar_noon\": \"11:50:51 AM\",", "mock import patch from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE = \"00\" FAKE_LONGITUDE", "import print_function from __future__ import unicode_literals import pytest from mock 
import patch from", "@classmethod def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format(", "import pytest from mock import patch from night_scheduler.framework.sun.sun import Sun class TestSun(object): FAKE_LATITUDE", "method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL, TestSun.FAKE_LATITUDE, TestSun.FAKE_LONGITUDE,", "\"status\": \"OK\" } @classmethod def setup_method(self, method): self.patcher_requests_get = patch('requests.get') self.mock_requests_get = self.patcher_requests_get.start()", "def teardown_method(self, method): self.mock_requests_get = self.patcher_requests_get.stop() # ############################################################################################## def test__get_sunset__no_params__calou_and_today_called(self): self.sun.get_sunset() self.mock_requests_get.assert_called_once_with(url=\"{}/json?lat={}&lng={}&date={}\".format( Sun.URL," ]
[ "parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:]) process = psutil.Process(int(args.pid)) return process.cmdline() sys.exit(wrap_scriptlet(run))", "import sys import psutil from wrap_scriptlet import wrap_scriptlet def run(): parser = argparse.ArgumentParser()", "import argparse import sys import psutil from wrap_scriptlet import wrap_scriptlet def run(): parser", "def run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:]) process = psutil.Process(int(args.pid)) return", "sys import psutil from wrap_scriptlet import wrap_scriptlet def run(): parser = argparse.ArgumentParser() parser.add_argument('pid')", "from wrap_scriptlet import wrap_scriptlet def run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:])", "run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:]) process = psutil.Process(int(args.pid)) return process.cmdline()", "wrap_scriptlet import wrap_scriptlet def run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:]) process", "wrap_scriptlet def run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:]) process = psutil.Process(int(args.pid))", "import psutil from wrap_scriptlet import wrap_scriptlet def run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args", "argparse import sys import psutil from wrap_scriptlet import wrap_scriptlet def run(): parser =", "python3 import argparse import sys import psutil from wrap_scriptlet import wrap_scriptlet def run():", "#!/usr/bin/env python3 import argparse import sys import psutil from wrap_scriptlet import wrap_scriptlet def", "psutil from wrap_scriptlet import wrap_scriptlet def run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args =", "import wrap_scriptlet def 
run(): parser = argparse.ArgumentParser() parser.add_argument('pid') args = parser.parse_args(sys.argv[1:]) process =" ]
[ "due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk()", "customer ID') except Exception as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer", "Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to check balance in your", "check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue():", "Label, Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3 import datetime import time", "balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to check balance in", "ID\\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET", "customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to", "Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame,", "Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer", "Account Opened',f\"Your account has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data 
VALUES(?,?,?)',last_entry)", "* FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1])", "Name text, Contact integer, State text, City text, Pincode integer, Email text, Balance", "Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data", "name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50)", "ID\\nReturning to Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?,", "bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value']", "this customer ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs.", "Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def", "{new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() 
Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home():", "find customer with this customer ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You", "balance in your account\") except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt", "found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if", "#Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False", "#Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE", "amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer", "customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True", "withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your", "name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') 
Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5)", "50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1)", "your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',(amt,cust_num)) found=True else:", ")''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data(", "* Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try:", "submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details", "if len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found with", "Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT INTO Bank_Data", "text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data')", "process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy()", "Bank_Data 
VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass", "balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get()", "= 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value']", "with this customer ID') except Exception as e: pass transfer_window.destroy() return else: try:", "deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT", "ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill", "text='Drop your customer ID below to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2)", "Customer found','Unable to find customer with this customer ID\\nReturning to Menu') except: pass", "Balance=Balance+? 
WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit()", "to find customer with this customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return def", "-*- \"\"\" Created on Tue May 19 15:51:38 2020 \"\"\" from tkinter import", "heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50)", "deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your", "submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window", "customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find", "your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove()", "def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open a new", 
"cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey')", "pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money", "update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account')", "May 19 15:51:38 2020 \"\"\" from tkinter import Button, Label, Entry, messagebox, Tk,", "New Account') heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5)", "heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label", "employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome", "text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') 
heading=Label(update_window, text='Enter your", "ID\\nReturning to Menu') except Exception as e: pass transfer_window.destroy() return if found_receiver: if", "Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame,", "VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy()", "process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try:", "confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE", "messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to Menu') withdraw_window.destroy()", "customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date", "#submit Button def home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome", "except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit", "Customer Details') update_window.geometry('600x112') 
heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def", "balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to check", "customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name text, Contact", "balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check')", "FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer", "to add exit and confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance']", "balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning", "bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions", "SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try:", "amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0:", "tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter", "to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5)", "return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. {amount.get()} in your account') amt=int(amount.get()) if", "progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60", "Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i", "progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100", "found','Unable to find customer with this customer ID\\nReturning to Menu') except: pass update_window.destroy()", "Balance=Balance+? 
WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT", "def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender]))", "has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2)", "messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to Menu') except", "Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name", "progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks()", "VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT", "Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found", "END, ttk, LabelFrame import sqlite3 import datetime import time def process(process): bar=Tk() 
bar.title(f'Processing", "except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?,", ")''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT INTO Bank_Data VALUES(0,0,0)') bank_data.commit()", "the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE", "balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to check balance", "Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if", "Tue May 19 15:51:38 2020 \"\"\" from tkinter import Button, Label, Entry, messagebox,", "deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer():", "WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit()", "if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found with this customer ID')", "pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if", "Label(withdraw_window ,text='Enter your Customer 
ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30)", "dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF", "time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw", "text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of", "#Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5)", "return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE", "cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10)", "Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id", "try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to Menu')", "customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name text, Contact integer,", "len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer with this customer", "customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0:", "messagebox.showerror('Low Balance',\"You don't have enough balance in your account\") except: pass if found:", "= 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def", "amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info", "Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() 
customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM", "Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode", "employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit()", "withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5)", "city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email", "time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] =", "Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer", "amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning", "Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): 
balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer", "submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10)", "Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1])", "* FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text,", "{amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',(amt,cust_num))", "in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window", "text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label", "except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? 
WHERE Customer_ID=?',(amt,cust_num))", "bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40", "withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit():", "new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been created!!\\nCustomer", "time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw", "root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply", "NOT EXISTS customer_info( Customer_ID integer, Name text, Contact integer, State text, City text,", "Exception as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to find", "trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)):", "Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, 
text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City", "FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info", "WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not", "Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance')", ",text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount", "root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30)", "def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender", "as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to find customer", "FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance", "Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details has been", "#Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50)", "heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5)", "Created on Tue May 19 15:51:38 2020 \"\"\" from tkinter import Button, Label,", "transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10)", "amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=", "Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer", "#Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID", "new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') 
root.iconbitmap('anonymous.ico')", "last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred", "update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit", "Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit()", "details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to", "to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1)", "this customer ID') except Exception as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No", "customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account", "#Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * 
from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610", "Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New", "the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get()", "customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE", "your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for", "def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0)", "Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT", "ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window)", "contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5)", "messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to Menu') except:pass", "WHERE Customer_ID= 
?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to", "withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. {amount.get()} in your account') amt=int(amount.get())", "acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation dialog root.mainloop() if __name__", "Rs. {amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE", "IF NOT EXISTS customer_info( Customer_ID integer, Name text, Contact integer, State text, City", "customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count integer,", "this customer ID\\nReturning to Menu') except Exception as e: pass transfer_window.destroy() return if", "Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,)", "Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter", "must have Rs. {amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET", "pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',[amount,customer_id_sender])", "def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer", "created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button", "customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry)", "Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return", "2020 \"\"\" from tkinter import Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame", "open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14))", "* FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer", "not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found with this customer ID') except", "WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your", "found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT", "Customer_ID=?',[customer_id_sender])) if 
len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if", "submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0)", "def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2)", "customer ID\\nReturning to Menu') except Exception as e: pass transfer_window.destroy() return if found_receiver:", "customer ID\\nReturning to Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET", "return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info", "customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable", "complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter", "time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] =", "#City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account,", "integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT INTO", "15:51:38 2020 \"\"\" from tkinter import Button, Label, Entry, messagebox, Tk, END, ttk,", "FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not", "ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. 
{amount.get()} in", "found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver:", "email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT", "Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?,", "<filename>Bank.pyw<gh_stars>0 # -*- coding: utf-8 -*- \"\"\" Created on Tue May 19 15:51:38", "m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money", "customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1", "name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label", "def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) 
customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has", "process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2)", "Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open", "Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below", "update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=?", "found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+?", "your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance", "Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10))", "Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone()", "* FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No", "if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS", "cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the", "Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient", "text, Contact integer, State text, City text, Pincode integer, Email text, Balance integer", "# -*- coding: utf-8 -*- \"\"\" Created on Tue May 19 15:51:38 2020", "Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def 
transfer(): transfer_window=Tk()", "withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if", "id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT *", "details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50)", "Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to", "if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET", "len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found with this", "#Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the customer id", "e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to find customer with", "text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID", "= 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks()", "employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) 
cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO", "has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5)", "to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update", "Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1)", "#deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID", "find customer with this customer ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check", "Label(balance_window, text='Drop your customer ID below to check balance in your account:', font=('bold',14),", "Customer found','Unable to find customer with this customer ID\\nReturning to Menu') except:pass deposit_window.destroy()", "found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? 
WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT *", "Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient", "Customer found','Unable to find customer with this customer ID\\nReturning to Menu') withdraw_window.destroy() return", "deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True", "customer with this customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False", "text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account,", "this customer ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def", "bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money", "text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+", "sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) 
Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window,", "trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1)", "employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully')", "bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value']", "VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to Anonymous Banking')", "INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to", "your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to", "bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk()", "to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num]))", "in your account\") except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt 
employee.execute('INSERT", "city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this", "found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender:", "Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State", "to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check", "with this customer ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have", "your account\") except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO", "\"\"\" Created on Tue May 19 15:51:38 2020 \"\"\" from tkinter import Button,", "except Exception as e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info", "transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label", "return else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning", "customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been 
created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO", "find customer with this customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues():", "your customer ID below to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading", "customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label", "pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall()", "19 15:51:38 2020 \"\"\" from tkinter import Button, Label, Entry, messagebox, Tk, END,", "* FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM", "-*- coding: utf-8 -*- \"\"\" Created on Tue May 19 15:51:38 2020 \"\"\"", "details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation", "def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) 
frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5)", "Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame,", "found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry)", "customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1)", "100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter", "update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact", "Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update", "Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next", "Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount", "text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label", "Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter", "cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info", "Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey')", "window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer 
ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label", "Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to Anonymous", "your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and", "for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the", "last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw", "ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label", "last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() 
Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk()", "#need to add exit and confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer", "transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window)", "amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer", "New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit)", "customer with this customer ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must", "= 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks()", "INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except:", "heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) 
def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove()", "TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name text, Contact integer, State text,", "row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try:", "__name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info(", "details has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue)", "except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money", "have enough balance in your account\") except: pass if found: employee.execute('SELECT * Bank_Data')", "except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry)", "Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw", "transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) 
found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if", "try: messagebox.showinfo('Update Details','Your account details has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit()", "in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to", "Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account)", "(name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details has been updates Successfully!!') except:", "Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def", "integer, Email text, Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE", "to find customer with this customer ID\\nReturning to Menu') except Exception as e:", "open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account,", "to find customer with this customer ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues():", "if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account 
Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except:", "text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1)", "don't have enough balance in your account\") except: pass if found: employee.execute('SELECT *", "employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit()", "* FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit()", "last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount", "the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get()", "Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1)", "found with this customer ID') except Exception as e: pass transfer_window.destroy() return else:", "FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: 
details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1)", "Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50)", "#Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw", "TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT", "tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter", "WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO", "54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account", "in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',(amt,cust_num)) found=True", "len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this", "20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] =", "customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now()", "m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check", "transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your", "for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try:", "Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) 
def open_new_account(): new_account=Tk()", "bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New", "Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer", "frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5)", "Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1)", "process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks()", "bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your", "State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account", "Balance in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable", "Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT *", "Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver", "Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def", "Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112')", "row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No", "= 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks()", "text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, 
text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label", "withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in", "import Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3 import datetime", "amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM", "customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill", "customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't have enough", "if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with", "Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10))", "progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1)", "Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50)", "Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4)", "try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled", "coding: utf-8 -*- \"\"\" Created on Tue May 19 15:51:38 2020 \"\"\" from", "ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the customer", "check_balance.grid(row=2,column=2,padx=30,pady=50) 
deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer)", "found','Unable to find customer with this customer ID\\nReturning to Menu') withdraw_window.destroy() return def", "if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You", "text, Pincode integer, Email text, Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\")", "Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass", "text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to", "VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else:", "integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS", "def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance", "Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass 
balance_window.destroy() return try: messagebox.showerror('No Customer", "customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't have", "def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20", "text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label", "customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer", "ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name text,", "Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the", "messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to Menu') except:", "process('customer Update') try: messagebox.showinfo('Update Details','Your account details has been updates Successfully!!') except: pass", "messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) 
submit.grid(row=0,column=2)", "submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open a", "customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer", "text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account,", "text, City text, Pincode integer, Email text, Balance integer )''') customer.execute('SELECT * FROM", "else: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to Menu')", "amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low", "if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? 
WHERE", "len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True", "LabelFrame import sqlite3 import datetime import time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100')", "from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new)", "amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=", "not fount','Customer not found with this customer ID') except Exception as e: pass", "account details has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find", "bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value']", "Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find customer with", "WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find", "the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) 
acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need", "cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your", "20 bar.update_idletasks() time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1)", "cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone()", "FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try:", "SET Balance=Balance-? 
WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't have enough balance", "Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit", "#Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,)", "import sqlite3 import datetime import time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate')", "ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window)", "60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process}", "last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount", "update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) 
cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get()", "text, Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1:", "cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None:", "process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been", "text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email]", ",text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask", "{process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1) progress['value'] =", "the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE", "text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000)", "progress['value'] = 20 bar.update_idletasks() 
time.sleep(1) progress['value'] = 40 bar.update_idletasks() time.sleep(1) progress['value'] = 50", "exit and confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor()", "details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else:", "* from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info", "submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter", "updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account():", "text, Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF", "FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count", ",text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID 
Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask", "row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT *", "complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money')", "pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112')", "bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] = 20 bar.update_idletasks() time.sleep(1)", "id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT *", "fount','Customer not found with this customer ID') except Exception as e: pass transfer_window.destroy()", "find customer with this customer ID\\nReturning to Menu') except Exception as e: pass", "NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM", "Balance=Balance-? 
WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't have enough balance in", "pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT * from", "Transfer cancelled due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def", "submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID", "SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt", "customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name text, Contact integer, State", "root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw)", "?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer with", "amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning", "to 
Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?,", "to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for", "employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer )''')", "your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to", "id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get()", "Menu') except Exception as e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE", "withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. {amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE", "for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue():", "from tkinter import Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3", "row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? 
WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't", "Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance')", "balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window,", "enough balance in your account\") except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1])", "ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def", "process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2)", "ID') except Exception as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable", "found','Unable to find customer with this customer ID\\nReturning to Menu') except: pass balance_window.destroy()", "on Tue May 19 15:51:38 2020 \"\"\" from tkinter import Button, Label, Entry,", "deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer", "home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern 
Bank',fg='White',bg='black',font=('bold',14),width=50)", "in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer", "withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your", "time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] =", "try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5)", "text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label", "for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue():", "WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) 
last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO", "if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0:", "progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window", "Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3 import datetime import", "EXISTS customer_info( Customer_ID integer, Name text, Contact integer, State text, City text, Pincode", "Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10)", "Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit", "Bank_Data( Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall()", "with this customer ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5)", "new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account,", "ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() 
amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT", "Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. {amount.get()} in your account')", "cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM", "import time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value']", "Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next", "else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance') except: pass transfer_window.destroy()", "Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? 
WHERE", "i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No", "integer )''') employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT INTO Bank_Data VALUES(0,0,0)')", "Balance',\"You don't have enough balance in your account\") except: pass if found: employee.execute('SELECT", "customer with this customer ID\\nReturning to Menu') except: pass update_window.destroy() return def Edit_details_continued():", "Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account,", "#Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your", "cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame,", "Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label", "messagebox.showinfo('Update Details','Your account details has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2)", "import datetime import time def process(process): bar=Tk() bar.title(f'Processing 
{process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady =", "try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5)", "#Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,))", "City=?, Pincode=?, Email=? WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details", "found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found with this customer", "def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0)", "if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl')", "e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE", "customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update')", "if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer with this", "#ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def", "to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+?", "pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer')", "employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer", "pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop", "employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully')", "try: messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try:", "80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def 
withdraw():", "Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if", "SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount", "state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with", "have Rs. {amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-?", "in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT", "Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money')", "amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if", "your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey')", "Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) 
#Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT", "text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1)", "Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5)", "customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been created!!\\nCustomer ID:", "sqlite3 import datetime import time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady", "customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT", "email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in", "pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window", 
"40 bar.update_idletasks() time.sleep(1) progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1)", "Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter", "withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer", "ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1)", "Opened',f\"Your account has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit()", "Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}')", "found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit()", "last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited", "== '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID", "#State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label 
city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50)", "time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20) progress['value'] =", "Tk, END, ttk, LabelFrame import sqlite3 import datetime import time def process(process): bar=Tk()", "your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for", "progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy()", "Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE IF NOT", "Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5)", "Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details has", "messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due", "#Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the customer id", "root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT", "Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter", "fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if", "= 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in", "Contact integer, State text, City text, Pincode integer, Email text, Balance integer )''')", "pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?,", "Update') try: messagebox.showinfo('Update Details','Your account details has been updates Successfully!!') except: pass update_window.destroy()", "customer ID\\nReturning to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. 
{amount.get()}", "this customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE", "\"\"\" from tkinter import Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame import", "below to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150)", "Edit_details(): update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window,", "integer, Name text, Contact integer, State text, City text, Pincode integer, Email text,", "Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3 import datetime import time def", "Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer", "Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details has been updates Successfully!!')", "pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to find customer with this", "if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1)", "def Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?',", "amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0:", "ID below to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12))", "text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account,", "font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info", "Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't have enough balance in your account\")", "found','Unable to find customer with this customer ID\\nReturning to Menu') except Exception as", "deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30)", "of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get())", 
"messagebox.showerror('Receiver not fount','Customer not found with this customer ID') except Exception as e:", "Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find customer", "your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find", "Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200')", "contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer", "deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add", "Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation dialog root.mainloop() if", "email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning", "email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) 
employee.execute('SELECT * from Bank_Data') bank_record=employee.fetchall() last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process():", "withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details)", "utf-8 -*- \"\"\" Created on Tue May 19 15:51:38 2020 \"\"\" from tkinter", "#Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for", "customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1])", "balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to check balance in your account:',", "to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance)", "as e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-?", "Successfully') except: pass 
withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button submit=Button(withdraw_window,text='Find Customer',command=withdraw_continue,padx=5) submit.grid(row=0,column=2) def deposit(): deposit_window=Tk()", "city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account,", "Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available", "#Contact Label contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5)", "Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State", "WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None: details=[name,contact,state,city,pincode,email] for i in range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1)", "been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) 
submit.grid(row=1,column=2,pady=5) def", "text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info", "messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2)", "cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT * FROM customer_info WHERE", "to find customer with this customer ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window,", "frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label", "tkinter import Button, Label, Entry, messagebox, Tk, END, ttk, LabelFrame import sqlite3 import", "= 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5)", "cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer", "Button def home(): root=Tk() root.title('Welcome to Anonymous Banking') 
root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to", "IF NOT EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT *", "Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False", "Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except: pass", "Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get()", "pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer", "messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green',", "Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',(customer_id,)).fetchone() if trim!=None:", "Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your Account:", "Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money 
Transfer') bank_data.commit() customer_data.commit() try:", "the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue():", "State text, City text, Pincode integer, Email text, Balance integer )''') customer.execute('SELECT *", "new_account=Tk() new_account.title('Open New Account') heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5)", "Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data", "* FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver", "SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer')", "add exit and confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\")", "customer with this customer ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue)", "Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer 
Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6)", "try: messagebox.showerror('Low Balance',\"You don't have enough balance in your account\") except: pass if", "text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the", "Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer", "customer ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details():", "heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your", "to find customer with this customer ID\\nReturning to Menu') except: pass update_window.destroy() return", "else: try: messagebox.showerror('Low Balance',\"You don't have enough balance in your account\") except: pass", "if found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt 
employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money", "account\") except: pass if found: employee.execute('SELECT * Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data", "found','Unable to find customer with this customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return", "been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit", "pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance') except: pass", "ttk, LabelFrame import sqlite3 import datetime import time def process(process): bar=Tk() bar.title(f'Processing {process}')", "with this customer ID\\nReturning to Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE", "state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account,", "time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue') complete.grid(row=1,column=1,pady=10)", "contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account,", "account') amt=int(amount.get()) 
if row[0][7]>=amt: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',(amt,cust_num)) found=True else: try:", "Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass", "Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find", "found=True else: try: messagebox.showerror('Low Balance',\"You don't have enough balance in your account\") except:", "to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num]))", "progress['value'] = 50 bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80", "WHERE Customer_ID=?',(amt,cust_num)) found=True else: try: messagebox.showerror('Low Balance',\"You don't have enough balance in your", "not found with this customer ID') except Exception as e: pass transfer_window.destroy() return", "customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1])", "Edit_details_continued(): customer.execute('UPDATE customer_info SET Name=?, Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id))", "contact=Entry(new_account,width=50) contact.grid(row=2,column=1) Label(new_account, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label", "text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label", ",text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount", "cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the", "Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT * FROM", "customer_info SET Balance=Balance+? 
WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money", "to Menu') except Exception as e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]:", "Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label", "Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter", "last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen", "ID\\nReturning to Menu') except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk()", "submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame,", "check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the 
account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) update.grid(row=3,column=1,pady=30)", "to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def", "fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove() heading.grid_remove() update_window.geometry('420x200') frame=LabelFrame(update_window,text='Fill your Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0)", "#ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def", "WHERE Customer_ID=?',[customer_id_sender])) if len(row_sender)>0: found_sender=True if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver]))", "customer.execute('UPDATE customer_info SET Balance=Balance+? 
WHERE Customer_ID=?',(amt,cust_num)) found=True if found: employee.execute('SELECT * FROM Bank_Data')", "bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value'] = 100 time.sleep(0.5) complete=Label(bar,text=f'{process} completed!!',fg='blue')", "Pincode integer, Email text, Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor()", "except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open", "Customer_ID integer, Name text, Contact integer, State text, City text, Pincode integer, Email", "customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info", "state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5)", "update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150)", "#Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label 
email=Entry(new_account, text='',width=50) email.grid(row=6,column=1) employee.execute('SELECT", "customer_info( Customer_ID integer, Name text, Contact integer, State text, City text, Pincode integer,", "amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to", "text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue(): customer_id=cust_id.get() cust_id.grid_remove() submit.grid_remove()", "Balance','Money Transfer cancelled due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5)", "Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City", "the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the Customer id of Receiver').grid(row=2,column=0)", "def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer", "city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50) trim=customer.execute('SELECT *", "window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter 
your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label", "with this customer ID\\nReturning to Menu') except Exception as e: pass transfer_window.destroy() return", "state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode", "transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer", "ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the customer", "messagebox, Tk, END, ttk, LabelFrame import sqlite3 import datetime import time def process(process):", "text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50) pincode.grid(row=5,column=1) Label(new_account, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(new_account, text='',width=50) email.grid(row=6,column=1)", "Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter", "deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window,", "bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() 
Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button", "found_receiver: try: messagebox.showerror('Receiver not fount','Customer not found with this customer ID') except Exception", "update_window=Tk() update_window.title('Update Customer Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12))", "FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: messagebox.showerror('No Customer found','Unable", "Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter", "Customer found','Unable to find customer with this customer ID\\nReturning to Menu') except Exception", "Customer',command=deposit_continue,padx=5) submit.grid(row=0,column=2) def transfer(): transfer_window=Tk() transfer_window.title('Money Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer", "Exception as e: pass transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET", "to Menu') withdraw_window.destroy() return def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. 
{amount.get()} in your", "pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE", "'__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer,", "bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except: pass withdraw_window.destroy() Button(withdraw_window,text='Next',command=withdraw_continues).grid(row=1,column=2) #Next Button", "except: pass balance_window.destroy() submit=Button(balance_window, text='Check Balance',font=('bold',12),command=balance_continue) submit.grid(row=2,column=0,pady=5) def Edit_details(): update_window=Tk() update_window.title('Update Customer Details')", "Account') heading=Label(new_account,text='Fill Below details to open a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name", "find customer with this customer ID\\nReturning to Menu') except: pass update_window.destroy() return def", "Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation dialog root.mainloop() if __name__ ==", "a new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5)", "return def deposit_continues(): found=False 
amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if", "def withdraw_continues(): messagebox.showwarning('Warning',f'You must have Rs. {amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt:", "customer_id=cust_id.get() customer.execute('SELECT Balance FROM customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try:", "#Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame, text='State',font=('bold',10)).grid(row=3,column=0,padx=5) #State Label state=Entry(frame,fg='grey',width=50) Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50)", "bar.update_idletasks() time.sleep(1) progress['value'] = 60 bar.update_idletasks() time.sleep(1) progress['value'] = 80 bar.update_idletasks() time.sleep(1) progress['value']", "customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy() Button(deposit_window,text='Next',command=deposit_continues).grid(row=1,column=2) #Next Button submit=Button(deposit_window,text='Find", "new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to", "update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) 
Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation dialog", "customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not found_receiver: try: messagebox.showerror('Receiver not fount','Customer", "padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below", "text='Enter the Customer id of Receiver').grid(row=2,column=0) #Customer ID receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def", "Email text, Balance integer )''') customer.execute('SELECT * FROM customer_info') bank_data=sqlite3.connect(\"BankData.db\") employee=bank_data.cursor() employee.execute('''CREATE TABLE", "except: pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find customer with this", "amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info", "customer id cust_id.grid(row=0,column=1) amount_label=Label(withdraw_window,text='Enter the amount to withdraw',font=('bold',10)) amount=Entry(withdraw_window) def withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT", "Details') update_window.geometry('600x112') heading=Label(update_window, text='Enter your Customer ID:',font=('bold',12),pady=10) heading.grid(row=0,column=0,columnspan=2) cust_id=Entry(update_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=1,ipadx=150) def Edit_details_continue():", "root.configure(bg='grey') m=Label(text='Welcome to Modern 
Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50)", "Details',bd=5,padx=10,pady=10) frame.grid(row=0,column=0) Label(frame, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(frame,width=50,fg='grey') Label(frame, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact Label contact=Entry(frame,width=50,fg='grey') Label(frame,", "def home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern", "with this customer ID\\nReturning to Menu') except:pass deposit_window.destroy() return def deposit_continues(): found=False amt=int(amount.get())", "new Account',font=('bold',14)) heading.grid(row=0,column=1,columnspan=2,pady=5) Label(new_account, text='Name',font=('bold',10)).grid(row=1,column=0,padx=5) #Name Label name=Entry(new_account, text='',width=50) name.grid(row=1,column=1) Label(new_account, text='Contact',font=('bold',10)).grid(row=2,column=0,padx=5) #Contact", "last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money", "completed!!',fg='blue') complete.grid(row=1,column=1,pady=10) bar.destroy() def withdraw(): withdraw_window=Tk() #withdraw window in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window", "in tkinter withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer 
ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window", "Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(new_account, text='',width=50)", "account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window, fg='grey',font=('bold',12)) cust_id.grid(row=1,column=0,ipadx=150) def balance_continue(): customer_id=cust_id.get() customer.execute('SELECT Balance FROM", "datetime import time def process(process): bar=Tk() bar.title(f'Processing {process}') bar.geometry('200x100') progress=ttk.Progressbar(bar,length=100,mode='determinate') progress.grid(row=0,column=1,pady = 20)", "if found_sender: row_receiver=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID=?',[customer_id_receiver])) if len(row_receiver)>0: found_receiver=True if not", "pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window, text='Find Customer',font=('bold',12),command=Edit_details_continue) submit.grid(row=1,column=2,pady=5) def open_new_account(): new_account=Tk() new_account.title('Open New", "deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5)", "range(len(details)): details[i].insert(END,trim[i+1]) name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable", "WHERE Customer_ID=?', 
(name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your account details has been updates", "root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440') root.iconbitmap('anonymous.ico') root.configure(bg='grey') m=Label(text='Welcome to Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5)", "Exit.grid(row=4,column=5) #need to add exit and confirmation dialog root.mainloop() if __name__ == '__main__':", "cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the", "withdraw_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1)", "Money') Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer", "VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Deposit Request','Amount deposited Successfully') except: pass deposit_window.destroy()", "Contact=?, State=?, City=?, Pincode=?, Email=? 
WHERE Customer_ID=?', (name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),customer_id)) process('customer Update') try: messagebox.showinfo('Update Details','Your", "Label(deposit_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30)", "customer ID below to check balance in your account:', font=('bold',14), pady=10).grid(row=0,column=0,columnspan=2) #Heading cust_id=Entry(balance_window,", "integer, State text, City text, Pincode integer, Email text, Balance integer )''') customer.execute('SELECT", "try: messagebox.showerror('Receiver not fount','Customer not found with this customer ID') except Exception as", "messagebox.showinfo('New Account Opened',f\"Your account has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data", "found: employee.execute('SELECT * FROM Bank_Data') last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now();last_entry[2]+=amt employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl')", "customer with this customer ID\\nReturning to Menu') except Exception as e: pass transfer_window.destroy()", "messagebox.showinfo('Account Balance',f'Available Balance in your Account: Rs.{Balance[0]}') except: pass balance_window.destroy() return try: messagebox.showerror('No", "EXISTS Bank_Data( Date text, Customer_count integer, Transactions integer )''') employee.execute('SELECT * FROM Bank_Data')", "and confirmation dialog root.mainloop() if __name__ == '__main__': customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE", "name.grid(row=1,column=1) contact.grid(row=2,column=1) state.grid(row=3,column=1) city.grid(row=4,column=1) 
pincode.grid(row=5,column=1) email.grid(row=6,column=1) else: try: messagebox.showerror('No Customer found','Unable to find", "?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try: messagebox.showerror('No Customer found','Unable to find customer", "amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',[amount,customer_id_receiver])", "def deposit_continues(): found=False amt=int(amount.get()) customer.execute('UPDATE customer_info SET Balance=Balance+? WHERE Customer_ID=?',(amt,cust_num)) found=True if found:", "submit.grid(row=0,column=2) def deposit(): deposit_window=Tk() #deposit window in tkinter deposit_window.title('Withdraw Money') Label(deposit_window ,text='Enter your", "INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!') except:", "row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else: try:", "deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1)", "cancelled due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance():", "account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update your details',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=Edit_details) 
update.grid(row=3,column=1,pady=30) acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to", "#State Label state=Entry(new_account, text='',width=50) state.grid(row=3,column=1) Label(new_account, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(new_account, text='',width=50) city.grid(row=4,column=1) Label(new_account,", "customer_info WHERE Customer_ID=?',(customer_id,)) Balance=customer.fetchone() if Balance!=None: process('Balance Check') try: messagebox.showinfo('Account Balance',f'Available Balance in", "employee.execute('SELECT * FROM Bank_Data') records=employee.fetchall() if len(records)<1: employee.execute('INSERT INTO Bank_Data VALUES(0,0,0)') bank_data.commit() home()", "else: try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to", "employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Transfer') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Money Transferred Successfully!!!')", "cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM customer_info WHERE Customer_ID= ?',[cust_num])) if len(row)>0: amount_label.grid(row=1,column=0) amount.grid(row=1,column=1) else:", "last_entry=list(bank_record[-1]) cust_id=int(last_entry[1])+ 54610 def process(): new=(cust_id,name.get(),contact.get(),state.get(),city.get(),pincode.get(),email.get(),1000) customer.execute('INSERT INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account", "#City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) 
Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email Label email=Entry(frame,fg='grey',width=50)", "except: pass else: try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance') except:", "return try: messagebox.showerror('No Customer found','Unable to find customer with this customer ID\\nReturning to", "account has been created!!\\nCustomer ID: {new[0]}\") last_entry[0]=datetime.datetime.now();last_entry[1]+=1 employee.execute('INSERT INTO Bank_Data VALUES(?,?,?)',last_entry) bank_data.commit() new_account.destroy()", "this customer ID\\nReturning to Menu') except: pass update_window.destroy() return def Edit_details_continued(): customer.execute('UPDATE customer_info", "pass balance_window.destroy() return try: messagebox.showerror('No Customer found','Unable to find customer with this customer", "except Exception as e: pass transfer_window.destroy() return else: try: messagebox.showerror('No Customer found','Unable to", "ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0) #Amount Label Amount=Entry(transfer_window,) Amount.grid(row=1,column=1,ipadx=50,padx=10)", "Modern Bank',fg='White',bg='black',font=('bold',14),width=50) m.grid(row=0,column=1,columnspan=3,pady=5) new=Button(text='Apply for New Account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=open_new_account) new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50)", "your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) 
cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window, text='Enter the amount').grid(row=1,column=0)", "ID Label cust_id=Entry(deposit_window ,text='Enter your Customer ID',width=30) #ask for the customer id cust_id.grid(row=0,column=1)", "receiver Label cust_id_receiver=Entry(transfer_window,) cust_id_receiver.grid(row=2,column=1,ipadx=50,padx=10) def transfer_continue(): customer_id_sender=cust_id_sender.get() customer_id_receiver=cust_id_receiver.get() amount=int(Amount.get()) found_sender=False found_receiver=False row_sender=list(customer.execute('SELECT *", "Label(frame, text='City',font=('bold',10)).grid(row=4,column=0,padx=5) #City Label city=Entry(frame,fg='grey',width=50) Label(frame, text='Pincode',font=('bold',10)).grid(row=5,column=0,padx=5) #Pincode Label pincode=Entry(frame,fg='grey',width=50) Label(frame, text='Email',font=('bold',10)).grid(row=6,column=0,padx=5) #Email", "Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE customer_info SET Balance=Balance+? 
WHERE Customer_ID=?',[amount,customer_id_receiver]) last_entry=list(employee.fetchall()[-1]) last_entry[0]=datetime.datetime.now() last_entry[2]=last_entry[2]+amount employee.execute('INSERT", "except: pass transfer_window.destroy() Button(transfer_window, text='Next',bg='Green', padx=10,pady=5,command=transfer_continue).grid(row=3,column=0,columnspan=2,ipadx=70,pady=5) def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window,", "City text, Pincode integer, Email text, Balance integer )''') customer.execute('SELECT * FROM customer_info')", "Details','Your account details has been updates Successfully!!') except: pass update_window.destroy() customer_data.commit() Button(frame,text='Submit',width=50,command=Edit_details_continued,bg='Green').grid(row=7,column=0,columnspan=2) submit=Button(update_window,", "new.grid(row=1,column=2,pady=30) withdraw_money=Button(text='Withdraw',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=withdraw) withdraw_money.grid(row=2,column=0,padx=50) check_balance=Button(text='Check Balance',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=balance) check_balance.grid(row=2,column=2,padx=30,pady=50) deposit_money=Button(text='Deposit Money to the account',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=deposit) deposit_money.grid(row=2,column=4) update=Button(text='Update", "bank_data.commit() new_account.destroy() Button(new_account,text='Submit',width=50,command=process).grid(row=7,column=0,columnspan=2) #submit Button def home(): root=Tk() root.title('Welcome to Anonymous Banking') root.geometry('1110x440')", "customer_info=['Customer ID','Name','Contact','State','City','Pincode','Email','Balance'] customer_data=sqlite3.connect(\"customer.db\") customer=customer_data.cursor() customer.execute('''CREATE TABLE IF NOT EXISTS customer_info( Customer_ID integer, Name", "#withdraw window in tkinter 
withdraw_window.title('Withdraw Money') Label(withdraw_window ,text='Enter your Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID", "INTO Bank_Data VALUES(?,?,?)',last_entry) process('Money Withdrawl') bank_data.commit() customer_data.commit() try: messagebox.showinfo('Withdraw Request','Amount withdrawen Successfully') except:", "Customer ID').grid(row=0,column=0,pady=10,padx=5) #Customer ID Label cust_id=Entry(withdraw_window ,text='Enter your Customer ID',width=30) #ask for the", "Transfer') Label(transfer_window, text='Enter your Customer ID',pady=5,padx=5).grid(row=0,column=0) #Customer ID sender Label cust_id_sender=Entry(transfer_window) cust_id_sender.grid(row=0,column=1,ipadx=50,padx=10) Label(transfer_window,", "try: messagebox.showerror('Insufficient Balance','Money Transfer cancelled due to Insufficient Balance') except: pass transfer_window.destroy() Button(transfer_window,", "transfer_window.destroy() return if found_receiver: if amount<=row_sender[0][7]: customer.execute('UPDATE customer_info SET Balance=Balance-? WHERE Customer_ID=?',[amount,customer_id_sender]) customer.execute('UPDATE", "cust_id.grid(row=0,column=1) amount_label=Label(deposit_window,text='Enter the amount to Deposit',font=('bold',10)) amount=Entry(deposit_window) def deposit_continue(): cust_num=cust_id.get() row=list(customer.execute('SELECT * FROM", "acc_transfer=Button(text='Transfer Money',bg='orange',fg='black',font=('bold',10),pady=10,padx=20,command=transfer) acc_transfer.grid(row=3,column=3,pady=30) Exit=Button(text='Exit',bg='black',fg='red',font=('bold',10),pady=6,padx=30,command=root.destroy,width=6) Exit.grid(row=4,column=5) #need to add exit and confirmation dialog root.mainloop()", "def balance(): balance_window=Tk() balance_window.title('Check Balance') balance_window.geometry('600x112') Label(balance_window, text='Drop your customer ID below to", "messagebox.showwarning('Warning',f'You must have Rs. 
{amount.get()} in your account') amt=int(amount.get()) if row[0][7]>=amt: customer.execute('UPDATE customer_info", "INTO customer_info VALUES(?,?,?,?,?,?,?,?)',new) customer_data.commit() messagebox.showinfo('New Account Opened',f\"Your account has been created!!\\nCustomer ID: {new[0]}\")" ]
[ "quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile)", "i in range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6],", "scan info of once would be be copied from 'tempList.csv' and be added", "'none' lists[row][18] = 'none' row += 1 #edition2 with open('tempList.csv', 'a+', newline='') as", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data =", "'none' lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6] = 'none'", "one scan has all been sent), the 'tempList.csv' would be refreshed with one", "'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i in range(1,517): data = ([", "print('0') return 'OK.' if __name__ == \"__main__\": #Use local host IP for local", "database) and be refreshed in 'oneTime.csv' (for check last time's scan info). Finally,", "= [[0 for col in range(19)] for row in range(APlength)] row = 0", "RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a',", "GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16]", "= 'none' lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7] =", "IP address of your own environment. 
(2) Then run this python file, A", "lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15]", "Firstly, modify the host IP address of your own environment. (2) Then run", "'Time']) for row in range(1,517): RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] =", "lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17]", "the mapping.csv, which contains 200 APs def checkAP(list, AP): row = 0 for", "SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX,", "Frequency = request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX", "SSID stored, encode mode is UTF-8 (as some SSID contains chinese characters) #edition", "== BSSID: RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] =", "ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all() for u", "ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users", "data (e.g. 
for signal level RSS it would be -110, magnetic field value", "row in reader] with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',',", "RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17]", "lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13]", "GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18] = Time", "row = 0 for AP in APs: lists[row][0] = AP lists[row][1] = 'none'", "Done == '1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1') else: print('0') return", "BSSID = request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x'] Location_y", "import Flask, request from app import db, models import csv import os #to", "RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile: spamwriter", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a', newline='') as csvfile:", "Time with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID',", "quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in users: data", "= 'none' lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9] =", "spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i in range(1,517): data =", "as tf from sklearn.preprocessing import scale import matplotlib.pyplot as plt 
PYTHONIOENCODING=\"UTF-8\" #set the", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time'])", "open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y',", "for i in range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5],", "get current path import importlib from model import * #algorithm part import pandas", "GeoZ, Model, Time): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS", "Frequency = list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx", "GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = request.form['Model'] Time = request.form['Time'] SSID", "''' #Add RSS info into database whose name is app.db def addAPs(list): for", "once would be be copied from 'tempList.csv' and be added in 'xxx.csv'(which stores", "'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data", "= list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX =", "A temporary file called 'tempList.csv' will be initialized with default data (e.g. 
for", "mapping.csv, which contains 200 APs def checkAP(list, AP): row = 0 for row", "APlength = len(APs) lists = [[0 for col in range(19)] for row in", "#add one time's scanner result def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,", "Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile)", "GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info from database def showAPs(num): ap =", "import tensorflow as tf from sklearn.preprocessing import scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\"", "GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none' row", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i in range(1,517):", "current path import importlib from model import * #algorithm part import pandas as", "for row in range(1,517): RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x", "for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5],", "([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11],", "info from database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y,", "RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv',", "Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level) if Done", "'none' 
lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16] = 'none'", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='') as", "and one symbol called \"Done\" would be set to '1' for last time,", "if RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3] = Floor", "= 'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1 with open('tempList.csv',", "utf-8 from flask import Flask, request from app import db, models import csv", "= GeoZ RSS[row][17] = Model RSS[row][18] = Time with open('tempList.csv', 'w', newline='') as", "lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the input AP's BSSID", "file called 'tempList.csv' will be initialized with default data (e.g. 
for signal level", "#addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency,", "list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info from database def", "= models.User.query.all() for u in users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x,", "RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18]", "spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building =", "order according to the unchanged file 'APs.csv' (to store the AP info in", "= 'none' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] =", "info that is similar to database) and be refreshed in 'oneTime.csv' (for check", "from flask import Flask, request from app import db, models import csv import", "= request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX'] AccY =", "local host IP for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True)", "= request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz =", "= 'none' lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6] =", "def isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile: #Check is tempList is empty", "'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3],", "from app import db, models import csv import os #to get current path", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y,", "RSS[row][14], RSS[row][15], RSS[row][16], 
RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile:", "RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a',", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a', newline='')", "u = models.User(BSSID = list[row][0], SSID = list[row][1], Building = list[row][2], Floor =", "Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17] = Model", "database whose name is app.db def addAPs(list): for row in range(0,517): u =", "list[row][0]: return row return 'none' def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX,", "GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info from database def showAPs(num): ap", "lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10]", "# coding: utf-8 from flask import Flask, request from app import db, models", "(as some SSID contains chinese characters) #edition def addAllCSV(): #whole database with open('APs.csv',", "Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r',", "time's transmission. 
############################################################################################ ''' # coding: utf-8 from flask import Flask, request from", "RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty()", "AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13] = Level", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for", "BSSID: RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x", "request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y,", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time'])", "scan has all been sent), the 'tempList.csv' would be refreshed with one line", "Room, Location_x, Location_y, BSSID, Frequency, Level) if Done == '1': refreshCSV(Building, Floor, Location_x,", "be none) with order according to the unchanged file 'APs.csv' (to store the", "db, models import csv import os #to get current path import importlib from", "Level) if Done == '1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1') else:", "= 'none' lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5] =", "'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([ lists[i][0], lists[i][1],", "APs that is detected once, then the transmission would be repeated 60 times", 
"request.form['GeoZ'] Model = request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX']", "ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all()", "if not os.path.getsize('./tempList.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level',", "to database) and be refreshed in 'oneTime.csv' (for check last time's scan info).", "refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='') as csvfile: reader =", "u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data)", "i in range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6],", "lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15]", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y,", "is UTF-8 (as some SSID contains chinese characters) #edition def addAllCSV(): #whole database", "RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12],", "Floor = list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX", "Location_y, Model) initializeTempList() print('1') else: print('0') return 'OK.' if __name__ == \"__main__\": #Use", "on FLASK micro framework, 1.Requirements: Python 3, Flask and relevant packages 2. How", "scan info). Finally, refresh 'tempList.csv' with default value for next time's transmission. 
############################################################################################", "newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building',", "#Show all RSS info from database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID,", "It's a light server based on FLASK micro framework, 1.Requirements: Python 3, Flask", "with open('userinput.csv', 'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',',", "for col in range(19)] for row in range(APlength)] row = 0 for AP", "lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the input AP's BSSID is in", "row in range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6],", "row in range(0,517): u = models.User(BSSID = list[row][0], SSID = list[row][1], Building =", "row in range(APlength)] row = 0 for AP in APs: lists[row][0] = AP", "models.User(BSSID = list[row][0], SSID = list[row][1], Building = list[row][2], Floor = list[row][3], Location_x", "Time): with open('userinput.csv', 'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile,", "whose name is app.db def addAPs(list): for row in range(0,517): u = models.User(BSSID", "list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy = list[row][11],", "request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy']", "in range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7],", "RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', 
methods=['POST'])", "= 0 for row in range(0,517): if AP == list[row][0]: return row return", "newline='') as csvfile: reader = csv.reader(csvfile) APs = [row[0] for row in reader]", "as csvfile: #Check is tempList is empty if not os.path.getsize('./tempList.csv'): #file is empty", "AccZ = list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level", "lists[i][18] ]) spamwriter.writerow(data) #Check if the input AP's BSSID is in the mapping.csv,", "def addAllCSV(): #whole database with open('APs.csv', 'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'):", "lists[row][0] = AP lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4]", "\"__main__\": #Use local host IP for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True)", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room,", "(assume there are 60 APs that is detected once, then the transmission would", "SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level,", "file 'APs.csv' (to store the AP info in a defined order) Each time", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x,", "= GeoY RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18] = Time with open('tempList.csv',", "print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='') as", "level RSS it would be -110, magnetic field value would be none) with", "RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15],", "([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11],", "AP == list[row][0]: return row return 'none' def 
tempList(BSSID,SSID, Building, Floor, Location_x, Location_y,", "unchanged file 'APs.csv' (to store the AP info in a defined order) Each", "sent), the 'tempList.csv' would be refreshed with one line of AP's info. After", "csv import os #to get current path import importlib from model import *", "u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ])", "order) Each time one complete info of AP arrival, (assume there are 60", "Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8] = AccY", "lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data)", "result def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into database whose name is app.db def", "as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level',", "= request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ =", "last time's scan info). 
Finally, refresh 'tempList.csv' with default value for next time's", "= 'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1 #edition2 with", "AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model", "'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ',", "lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12],", "'GeoZ', 'Model', 'Time']) #edition4 for i in range(1,517): data = ([ RSS[i][0], RSS[i][1],", "for i in range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5],", "request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX']", "import importlib from model import * #algorithm part import pandas as pdb import", "ap.GeoZ) def deleteDB(): users = models.User.query.all() for u in users: db.session.delete(u) db.session.commit() '''", "Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time", "'xxx.csv'(which stores all info that is similar to database) and be refreshed in", "import pandas as pdb import numpy as np import tensorflow as tf from", "3, Flask and relevant packages 2. How does this work? 
(1) Firstly, modify", "(2) Then run this python file, A temporary file called 'tempList.csv' will be", "lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9]", "spamwriter.writerow(data) #add one time's scanner result def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y,", "csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ'])", "By Zhenghang(<NAME> ############################################################################################ It's a light server based on FLASK micro framework, 1.Requirements:", "time one complete info of AP arrival, (assume there are 60 APs that", "'tempList.csv' would be refreshed with one line of AP's info. After 60 times", "#file not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model',", "#edition3 for row in range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4],", "lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the input", "RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14],", "range(0,517): if AP == list[row][0]: return row return 'none' def tempList(BSSID,SSID, Building, Floor,", "def addAPs(list): for row in range(0,517): u = models.User(BSSID = list[row][0], SSID =", "lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12]", "AccY, AccZ, 
ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv',", "post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor']", "AP's info. After 60 times (AP number), the function 'refreshCSV()' would be called.", "lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17]", "address of your own environment. (2) Then run this python file, A temporary", "list[row][0], SSID = list[row][1], Building = list[row][2], Floor = list[row][3], Location_x = list[row][4],", "'1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1') else: print('0') return 'OK.' if", "stored, encode mode is UTF-8 (as some SSID contains chinese characters) #edition def", "not os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level',", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data =", "open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y',", "Location_y, Model): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS =", "for last time, which means info of one scan has all been sent),", "refreshed with one line of AP's info. 
After 60 times (AP number), the", "u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter", "range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8],", "in range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7],", "Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX,", "one symbol called \"Done\" would be set to '1' for last time, which", "FLASK micro framework, 1.Requirements: Python 3, Flask and relevant packages 2. How does", "once, then the transmission would be repeated 60 times and one symbol called", "Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX'] AccY", "'-110' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none'", "spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty", "csvfile: reader = csv.reader(csvfile) RSS = [row for row in reader] with open('tempList.csv',", "RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17],", "addAllCSV(): #whole database with open('APs.csv', 'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter", "= request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done =", "lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv',", "RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13]", "Floor = request.form['Floor'] Location_x = 
request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level", "(for check last time's scan info). Finally, refresh 'tempList.csv' with default value for", "= list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency =", "ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ)", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building,", "and relevant packages 2. How does this work? (1) Firstly, modify the host", "with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID',", "RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',',", "'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row in range(1,517): RSS[row][2] = Building RSS[row][3]", "for row in reader] APlength = len(APs) lists = [[0 for col in", "plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode # create the application object app", "for row in range(0,517): u = models.User(BSSID = list[row][0], SSID = list[row][1], Building", "return 'none' def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building',", "200 APs def checkAP(list, AP): row = 0 for row in range(0,517): if", "+= 1 #edition2 with open('tempList.csv', 'a+', newline='') as csvfile: #Check is tempList is", "RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter", "in 'oneTime.csv' (for check last 
time's scan info). Finally, refresh 'tempList.csv' with default", "open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs = [row[0] for row", "SSID contains chinese characters) #edition def addAllCSV(): #whole database with open('APs.csv', 'w', newline='')", "0 for row in range(0,517): if AP == list[row][0]: return row return 'none'", "#edition2 with open('tempList.csv', 'a+', newline='') as csvfile: #Check is tempList is empty if", "Frequency RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10] = ORIx", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor,", "info). Finally, refresh 'tempList.csv' with default value for next time's transmission. ############################################################################################ '''", "be called. Then scan info of once would be be copied from 'tempList.csv'", "AccX RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy", "'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1 with open('tempList.csv', 'w',", "RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with", "all info that is similar to database) and be refreshed in 'oneTime.csv' (for", "called. 
Then scan info of once would be be copied from 'tempList.csv' and", "= 'none' row += 1 #edition2 with open('tempList.csv', 'a+', newline='') as csvfile: #Check", "ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all() for", "'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2],", "'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1 #edition2 with open('tempList.csv',", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r', newline='') as csvfile:", "u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter =", "RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6]", "GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "= list[row][1], Building = list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y =", "col in range(19)] for row in range(APlength)] row = 0 for AP in", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time ])", "python file, A temporary file called 'tempList.csv' will be initialized with default data", "spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data", "range(APlength)] row = 0 for AP in APs: lists[row][0] = AP lists[row][1] =", "'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level',", "= list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS", "'Model', 'Time']) for 
row in range(1,517): RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4]", "as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY',", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x,", "ORIz RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ", "of once would be be copied from 'tempList.csv' and be added in 'xxx.csv'(which", "\"Done\" would be set to '1' for last time, which means info of", "then the transmission would be repeated 60 times and one symbol called \"Done\"", "'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID, Building, Floor,", "ORIx = list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14],", "import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode # create the", "reader] APlength = len(APs) lists = [[0 for col in range(19)] for row", "host IP for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) '''", "Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS", "Flask, request from app import db, models import csv import os #to get", "csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ',", "Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, 
Model, Time) #addAPs(BSSID,", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r',", "RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13],", "if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level',", "request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX']", "= request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x'] Location_y =", "RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13],", "for signal level RSS it would be -110, magnetic field value would be", "u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner", "coding: utf-8 from flask import Flask, request from app import db, models import", "'none' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none'", "as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x',", "= 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12] =", "Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency 
RSS[row][7] = AccX", "#addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "open('APs.csv', 'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "in reader] APlength = len(APs) lists = [[0 for col in range(19)] for", "'none' lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8] = 'none'", "# create the application object app = Flask(__name__) #edition # Write all info", "function 'refreshCSV()' would be called. Then scan info of once would be be", "info in a defined order) Each time one complete info of AP arrival,", "Time = request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ", "= Model RSS[row][18] = Time with open('tempList.csv', 'w', newline='') as csvfile: spamwriter =", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model',", "light server based on FLASK micro framework, 1.Requirements: Python 3, Flask and relevant", "= request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID,", "added in 'xxx.csv'(which stores all info that is similar to database) and be", "RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15],", "= 'none' lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8] =", "RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) 
@app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty()", "spamwriter.writerow(data) #Check if the input AP's BSSID is in the mapping.csv, which contains", "RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18] = Time with open('tempList.csv', 'w', newline='')", "import * #algorithm part import pandas as pdb import numpy as np import", "'none' lists[row][18] = 'none' row += 1 with open('tempList.csv', 'w', newline='') as csvfile:", "#print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2]", "ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def", "row in range(0,517): if AP == list[row][0]: return row return 'none' def tempList(BSSID,SSID,", "GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY = list[row][8],", "]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty() #edition5", "= Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8] =", "lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17],", "RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12]", "object app = Flask(__name__) #edition # Write all info in DB into a", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile,", "time, which means info of one scan has all been sent), the 'tempList.csv'", "refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1') else: print('0') return 'OK.' 
if __name__", "RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14]", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency,", "list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level = list[row][13],", "def initializeTempList(): with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs =", "data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9],", "local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info", "Building = request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x'] Location_y = request.form['Location_y'] Frequency", "RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+', newline='')", "is tempList is empty if not os.path.getsize('./tempList.csv'): #file is empty spamwriter = csv.writer(csvfile,", "'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13] = 'none'", "empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4", "open('tempList.csv', 'a+', newline='') as csvfile: #Check is tempList is empty if not os.path.getsize('./tempList.csv'):", "spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID,", "def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "none) with order according to 
the unchanged file 'APs.csv' (to store the AP", "RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv',", "methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building'] Floor", "#Check is tempList is empty if not os.path.getsize('./tempList.csv'): #file is empty spamwriter =", "(AP number), the function 'refreshCSV()' would be called. Then scan info of once", "RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x)", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result def addCSV(BSSID, SSID,", "row in range(1,517): if RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2] = Building", "GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16],", "import csv import os #to get current path import importlib from model import", "if Done == '1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1') else: print('0')", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV()", "lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16]", "Model, Time): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS =", "'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data = ([", "GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, 
Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "as csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "in reader] with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "= list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level =", "last time, which means info of one scan has all been sent), the", "open('xxx.csv', 'a', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row", "'none' lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9] = 'none'", "= 'none' lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15] =", "been sent), the 'tempList.csv' would be refreshed with one line of AP's info.", "'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile,", "'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1", "create the application object app = Flask(__name__) #edition # Write all info in", "GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "open('oneTime.csv', 'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter =", "= '-110' lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] =", "that is similar to database) and be refreshed in 'oneTime.csv' (for check last", "and be refreshed in 'oneTime.csv' (for check last time's scan info). 
Finally, refresh", "#app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into database whose name is", "request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY']", "newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX',", "based on FLASK micro framework, 1.Requirements: Python 3, Flask and relevant packages 2.", "lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r',", "be set to '1' for last time, which means info of one scan", "'oneTime.csv' (for check last time's scan info). Finally, refresh 'tempList.csv' with default value", "= Level RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17] =", "GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX = list[row][7],", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for", "as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data", "= request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX'] AccY =", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result def addCSV(BSSID,", "Level, GeoX, 
GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a', newline='') as csvfile: if", "'none' lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5] = 'none'", "= '-110' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] =", "'none' lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none'", "def refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='') as csvfile: reader", "be -110, magnetic field value would be none) with order according to the", "of AP's info. After 60 times (AP number), the function 'refreshCSV()' would be", "= 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13] =", "of AP arrival, (assume there are 60 APs that is detected once, then", "lists[row][18] = 'none' row += 1 #edition2 with open('tempList.csv', 'a+', newline='') as csvfile:", "#isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor'] Location_x", "= [row[0] for row in reader] APlength = len(APs) lists = [[0 for", "without SSID stored, encode mode is UTF-8 (as some SSID contains chinese characters)", "according to the unchanged file 'APs.csv' (to store the AP info in a", "db.session.commit() #Show all RSS info from database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID,", "= request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x,", "u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile,", "models.User.query.all() for u in users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y,", "AP in APs: lists[row][0] = AP lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3]", "scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode 
mode # create", "= GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18] =", "chinese characters) #edition def addAllCSV(): #whole database with open('APs.csv', 'w', newline='') as csvfile:", "#addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level) if Done == '1': refreshCSV(Building, Floor,", "Floor, Location_x, Location_y, Model) initializeTempList() print('1') else: print('0') return 'OK.' if __name__ ==", "temporary file called 'tempList.csv' will be initialized with default data (e.g. for signal", "RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile:", "AP): row = 0 for row in range(0,517): if AP == list[row][0]: return", "#file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model',", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time'])", "#Add RSS info into database whose name is app.db def addAPs(list): for row", "into database whose name is app.db def addAPs(list): for row in range(0,517): u", "''' By Zhenghang(<NAME> ############################################################################################ It's a light server based on FLASK micro framework,", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building,", "= Location_y RSS[row][17] = Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4],", "all RSS info from database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building,", "row += 1 with open('tempList.csv', 'w', newline='') as 
csvfile: spamwriter = csv.writer(csvfile, delimiter=',',", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([", "Location_x RSS[row][5] = Location_y RSS[row][17] = Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2],", "quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building", "RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',',", "RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile: spamwriter = csv.writer(csvfile,", "#addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX,", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r', newline='') as csvfile:", "RSS[row][18] = Time with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',',", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('userinput.csv',", "initializeTempList() print('1') else: print('0') return 'OK.' if __name__ == \"__main__\": #Use local host", "2. How does this work? 
(1) Firstly, modify the host IP address of", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model',", "RSS info into database whose name is app.db def addAPs(list): for row in", "csvfile: reader = csv.reader(csvfile) APs = [row[0] for row in reader] APlength =", "= 0 for AP in APs: lists[row][0] = AP lists[row][1] = 'none' lists[row][2]", "addAPs(list): for row in range(0,517): u = models.User(BSSID = list[row][0], SSID = list[row][1],", "'none' def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the", "relevant packages 2. How does this work? (1) Firstly, modify the host IP", "lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1 #edition2", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u", "arrival, (assume there are 60 APs that is detected once, then the transmission", "row return 'none' def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv',", "([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz,", "APs def checkAP(list, AP): 
row = 0 for row in range(0,517): if AP", "= 'none' lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] =", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='') as csvfile:", "signal level RSS it would be -110, magnetic field value would be none)", "if AP == list[row][0]: return row return 'none' def tempList(BSSID,SSID, Building, Floor, Location_x,", "= models.User(BSSID = list[row][0], SSID = list[row][1], Building = list[row][2], Floor = list[row][3],", "lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if", "''' # coding: utf-8 from flask import Flask, request from app import db,", "'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row in range(1,517): RSS[row][2] =", "= request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx =", "'none' lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none'", "time's scan info). 
Finally, refresh 'tempList.csv' with default value for next time's transmission.", "GeoZ RSS[row][17] = Model RSS[row][18] = Time with open('tempList.csv', 'w', newline='') as csvfile:", "lists[row][17] = 'none' lists[row][18] = 'none' row += 1 with open('tempList.csv', 'w', newline='')", "GeoZ = request.form['GeoZ'] Model = request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID'] AccX", "ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all() for u in users: db.session.delete(u)", "Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "from database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency,", "RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as", "not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level',", "Level, GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r', newline='') as csvfile: reader", "as csvfile: reader = csv.reader(csvfile) RSS = [row for row in reader] with", "tempList is empty if not os.path.getsize('./tempList.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',',", "tensorflow as tf from sklearn.preprocessing import scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set", "u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result def", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data = ([ RSS[row][0], RSS[row][1],", "as csvfile: reader = csv.reader(csvfile) APs = [row[0] 
for row in reader] APlength", "csvfile: #Check is tempList is empty if not os.path.getsize('./xxx.csv'): #file not established spamwriter", "'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2],", "lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the input AP's BSSID is", "'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([", "request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x'] Location_y = request.form['Location_y']", "RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18]", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in", "range(1,517): if RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3] =", "spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row in range(1,517):", "Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "store the AP info in a defined order) Each time one complete info", "#addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX,", "be refreshed with one line of AP's info. After 60 times (AP number),", "modify the host IP address of your own environment. 
(2) Then run this", "u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level,", "SSID = request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx", "== \"__main__\": #Use local host IP for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx',", "= 'none' lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14] =", "Location_x = list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a',", "= Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7],", "lists[row][18] = 'none' row += 1 with open('tempList.csv', 'w', newline='') as csvfile: spamwriter", "sklearn.preprocessing import scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode", "RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency RSS[row][7]", "and be added in 'xxx.csv'(which stores all info that is similar to database)", "ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all() for u in", "RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data)", "request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done']", "Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "#to get current path import importlib from model import * #algorithm part import", "'Time']) for i in range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4],", "u.GeoY, u.GeoZ]) spamwriter = 
csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result", "= 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none' row +=", "import numpy as np import tensorflow as tf from sklearn.preprocessing import scale import", "Flask(__name__) #edition # Write all info in DB into a csv file, without", "part import pandas as pdb import numpy as np import tensorflow as tf", "mode is UTF-8 (as some SSID contains chinese characters) #edition def addAllCSV(): #whole", "all info in DB into a csv file, without SSID stored, encode mode", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i", "break def isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile: #Check is tempList is", "i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6],", "= [row for row in reader] #print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0]", "RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5]", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data", "range(1,517): RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y", "Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x,", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 
'GeoX','GeoY','GeoZ', 'Model','Time']) for i in", "'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data = ([ lists[i][0],", "range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8],", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y,", "isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile: #Check is tempList is empty if", "RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'):", "would be -110, magnetic field value would be none) with order according to", "file, without SSID stored, encode mode is UTF-8 (as some SSID contains chinese", "Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv',", "= 'none' lists[row][18] = 'none' row += 1 #edition2 with open('tempList.csv', 'a+', newline='')", "the host IP address of your own environment. 
(2) Then run this python", "'tempList.csv' and be added in 'xxx.csv'(which stores all info that is similar to", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in users:", "[row for row in reader] #print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0] ==", "a light server based on FLASK micro framework, 1.Requirements: Python 3, Flask and", "open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS = [row for row", "RSS[row][5] = Location_y RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9]", "([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11],", "import scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode #", "for row in range(1,517): if RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2] =", "GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='')", "reader] #print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0] == BSSID: RSS[row][1] = SSID", "'-110' lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none'", "with open('APs.csv', 'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',',", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r', newline='')", "this work? 
(1) Firstly, modify the host IP address of your own environment.", "@app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building']", "Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time):", "the utf-8 encode mode # create the application object app = Flask(__name__) #edition", "= Frequency RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10] =", "AP lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4] = 'none'", "be initialized with default data (e.g. for signal level RSS it would be", "RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12],", "with open('xxx.csv', 'a', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for", "if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users", "'Model','Time']) for i in range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4],", "lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x,", "is empty if not os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building,", "= 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13] =", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor,", "RSS[row][16], RSS[row][17], RSS[row][18] ]) 
spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def", "from model import * #algorithm part import pandas as pdb import numpy as", "'Model', 'Time']) with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs =", "RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12],", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID,", "len(APs) lists = [[0 for col in range(19)] for row in range(APlength)] row", "range(0,517): u = models.User(BSSID = list[row][0], SSID = list[row][1], Building = list[row][2], Floor", "= 'none' row += 1 with open('tempList.csv', 'w', newline='') as csvfile: spamwriter =", "request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = request.form['Model']", "'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([ RSS[i][0],", "RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13],", "= csv.reader(csvfile) APs = [row[0] for row in reader] APlength = len(APs) lists", "copied from 'tempList.csv' and be added in 'xxx.csv'(which stores all info that is", "with open('oneTime.csv', 'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter", "of one scan has all been sent), the 'tempList.csv' would be refreshed with", "pandas as pdb import numpy as np import tensorflow as tf from sklearn.preprocessing", "newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ',", "request from app import 
db, models import csv import os #to get current", "#edition4 for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4],", "packages 2. How does this work? (1) Firstly, modify the host IP address", "= 'none' lists[row][18] = 'none' row += 1 with open('tempList.csv', 'w', newline='') as", "users = models.User.query.all() for u in users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor,", "csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ',", "newline='') as csvfile: reader = csv.reader(csvfile) RSS = [row for row in reader]", "= request.form['Floor'] Location_x = request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level =", "mode # create the application object app = Flask(__name__) #edition # Write all", "= list[row][0], SSID = list[row][1], Building = list[row][2], Floor = list[row][3], Location_x =", "RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "for AP in APs: lists[row][0] = AP lists[row][1] = 'none' lists[row][2] = 'none'", "newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',',", "called \"Done\" would be set to '1' for last time, which means info", "request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY']", "'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row in range(1,517): RSS[row][2]", "ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, 
ap.GeoX, ap.GeoY,", "models import csv import os #to get current path import importlib from model", "= list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx =", "the AP info in a defined order) Each time one complete info of", "RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14],", "GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY,", "be repeated 60 times and one symbol called \"Done\" would be set to", "is empty if not os.path.getsize('./tempList.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "pdb import numpy as np import tensorflow as tf from sklearn.preprocessing import scale", "in range(1,517): RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] =", "RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter =", "for row in reader] #print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0] == BSSID:", "]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add", "'Time']) for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4],", "'none' lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] = 'none'", "not os.path.getsize('./tempList.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ',", "it 
would be -110, magnetic field value would be none) with order according", "= AccY RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12] =", "for row in range(APlength)] row = 0 for AP in APs: lists[row][0] =", "'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13] = '-110'", "GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with", "RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17],", "= request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x'] Location_y = request.form['Location_y'] Frequency =", "RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11]", "'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in users: data = ([u.BSSID, u.SSID, u.Buidling,", "Model): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS = [row", "Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "row += 1 #edition2 with open('tempList.csv', 'a+', newline='') as csvfile: #Check is tempList", "'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID, Building, Floor, Location_x,", "'none' lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14] = 'none'", "lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6]", "newline='') as csvfile: #Check is tempList is empty if not os.path.getsize('./xxx.csv'): #file not", "]) spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile: #Check 
is", "APs: lists[row][0] = AP lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3] = 'none'", "refreshed in 'oneTime.csv' (for check last time's scan info). Finally, refresh 'tempList.csv' with", "if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY',", "list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz = list[row][12],", "RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18]", "'a+', newline='') as csvfile: #Check is tempList is empty if not os.path.getsize('./tempList.csv'): #file", "is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for", "quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3],", "RSS it would be -110, magnetic field value would be none) with order", "IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into database whose", "model import * #algorithm part import pandas as pdb import numpy as np", "importlib from model import * #algorithm part import pandas as pdb import numpy", "newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building',", "'tempList.csv' will be initialized with default data (e.g. 
for signal level RSS it", "#addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID,", "flask import Flask, request from app import db, models import csv import os", "os #to get current path import importlib from model import * #algorithm part", "return 'OK.' if __name__ == \"__main__\": #Use local host IP for local server", "Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level)", "does this work? (1) Firstly, modify the host IP address of your own", "else: print('0') return 'OK.' if __name__ == \"__main__\": #Use local host IP for", "default data (e.g. for signal level RSS it would be -110, magnetic field", "'refreshCSV()' would be called. Then scan info of once would be be copied", "csv file, without SSID stored, encode mode is UTF-8 (as some SSID contains", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data = ([", "lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16]", "spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile: #Check is tempList", "u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level,", "checkAP(list, AP): row = 0 for row in range(0,517): if AP == list[row][0]:", "scanner result def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "= ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10],", "in range(0,517): u = models.User(BSSID = list[row][0], SSID = list[row][1], Building = list[row][2],", "info of once would be 
be copied from 'tempList.csv' and be added in", "Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY,", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency,", "= 'none' lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] =", "'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID, Building, Floor, Location_x, Location_y,", "Location_y = list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ", "newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517):", "list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show", "'tempList.csv' with default value for next time's transmission. 
############################################################################################ ''' # coding: utf-8", "for row in reader] with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile,", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row in", "Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor,", "in a defined order) Each time one complete info of AP arrival, (assume", "RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv',", "Location_y, BSSID, Frequency, Level) if Done == '1': refreshCSV(Building, Floor, Location_x, Location_y, Model)", "RSS = [row for row in reader] #print(RSS,RSS[0][0]) for row in range(1,517): if", "be added in 'xxx.csv'(which stores all info that is similar to database) and", "'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([", "which means info of one scan has all been sent), the 'tempList.csv' would", "RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16],", "= [row for row in reader] with open('tempList.csv', 'w', newline='') as csvfile: spamwriter", "AccY = list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz", "Each time one complete info of AP arrival, (assume there are 60 APs", "'BSSID','SSID','Building', 'Floor','Location_x', 
'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in users: data = ([u.BSSID,", "contains chinese characters) #edition def addAllCSV(): #whole database with open('APs.csv', 'w', newline='') as", "ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX,", "== list[row][0]: return row return 'none' def tempList(BSSID,SSID, Building, Floor, Location_x, Location_y, Frequency,", "lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14]", "list[row][1], Building = list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y = list[row][5],", "is detected once, then the transmission would be repeated 60 times and one", "GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level)", "of your own environment. 
(2) Then run this python file, A temporary file", "= csv.reader(csvfile) RSS = [row for row in reader] with open('tempList.csv', 'w', newline='')", "csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y',", "PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode # create the application object app =", "= Time with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "would be repeated 60 times and one symbol called \"Done\" would be set", "reader = csv.reader(csvfile) RSS = [row for row in reader] #print(RSS,RSS[0][0]) for row", "Frequency, Level) if Done == '1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1')", "time's scanner result def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY,", "'1' for last time, which means info of one scan has all been", "number), the function 'refreshCSV()' would be called. Then scan info of once would", "60 times (AP number), the function 'refreshCSV()' would be called. 
Then scan info", "'Model', 'Time']) for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3],", "'Time']) with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs = [row[0]", "import db, models import csv import os #to get current path import importlib", "with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs = [row[0] for", "lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i)", "a csv file, without SSID stored, encode mode is UTF-8 (as some SSID", "for row in range(0,517): if AP == list[row][0]: return row return 'none' def", "= Location_y RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9] =", "Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level) if Done == '1':", "BSSID, Frequency, Level) if Done == '1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList()", "RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4]", "RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile: if", "os.path.getsize('./tempList.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model',", "environment. 
(2) Then run this python file, A temporary file called 'tempList.csv' will", "info of AP arrival, (assume there are 60 APs that is detected once,", "u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX,", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y,", "Model) initializeTempList() print('1') else: print('0') return 'OK.' if __name__ == \"__main__\": #Use local", "characters) #edition def addAllCSV(): #whole database with open('APs.csv', 'w', newline='') as csvfile: if", "has all been sent), the 'tempList.csv' would be refreshed with one line of", "request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY,", "#set the utf-8 encode mode # create the application object app = Flask(__name__)", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517):", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in", "are 60 APs that is detected once, then the transmission would be repeated", "application object app = Flask(__name__) #edition # Write all info in DB into", "info into database whose name is app.db def addAPs(list): for row in range(0,517):", "RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17] = Model x", "file, A temporary file called 'tempList.csv' will be initialized with default data (e.g.", "in range(1,517): if RSS[row][0] == BSSID: RSS[row][1] = SSID RSS[row][2] = Building RSS[row][3]", "= 
list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit()", "RSS[row][17] = Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6],", "os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ',", "GeoZ, Model, Time): with open('userinput.csv', 'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter", "Level, GeoX, GeoY, GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data)", "SSID RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y", "csvfile: if not os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x',", "RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ])", "newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz',", "tempList is empty if not os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile, delimiter=',',", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([", "request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = 
request.form['Model'] Time = request.form['Time']", "for next time's transmission. ############################################################################################ ''' # coding: utf-8 from flask import Flask,", "refresh 'tempList.csv' with default value for next time's transmission. ############################################################################################ ''' # coding:", "'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row in range(1,517): RSS[row][2] = Building", "'none' lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16] = 'none'", "Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY,", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i in", "encode mode # create the application object app = Flask(__name__) #edition # Write", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data", "RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ])", "GeoY RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18] = Time with open('tempList.csv', 'w',", "'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX,", "#Check if the input AP's BSSID is in the mapping.csv, which contains 200", "GeoX, GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a', newline='') as csvfile: if not", "RSS[i][16], RSS[i][17], RSS[i][18] ]) 
spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+', newline='') as", "'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs", "for row in range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5],", "lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def", "APs = [row[0] for row in reader] APlength = len(APs) lists = [[0", "is similar to database) and be refreshed in 'oneTime.csv' (for check last time's", "request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID']", "be refreshed in 'oneTime.csv' (for check last time's scan info). Finally, refresh 'tempList.csv'", "not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users =", "range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8],", "Python 3, Flask and relevant packages 2. How does this work? 
(1) Firstly,", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency,", "newline='') as csvfile: #Check is tempList is empty if not os.path.getsize('./tempList.csv'): #file is", "server based on FLASK micro framework, 1.Requirements: Python 3, Flask and relevant packages", "empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i", "Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID,", "framework, 1.Requirements: Python 3, Flask and relevant packages 2. How does this work?", "lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model): with", "as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode # create the application object", "tf from sklearn.preprocessing import scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8", "]) spamwriter.writerow(data) #Check if the input AP's BSSID is in the mapping.csv, which", "'GeoY', 'GeoZ', 'Model', 'Time']) for row in range(1,517): RSS[row][2] = Building RSS[row][3] =", "AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ", "Model RSS[row][18] = Time with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile,", "os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all()", "return row return 'none' def tempList(BSSID,SSID, Building, Floor, 
Location_x, Location_y, Frequency, AccX, AccY,", "Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX,", "How does this work? (1) Firstly, modify the host IP address of your", "Then run this python file, A temporary file called 'tempList.csv' will be initialized", "as pdb import numpy as np import tensorflow as tf from sklearn.preprocessing import", "as csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x',", "Level RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17] = Model", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) tempList(BSSID, SSID,Building,", "open('xxx.csv', 'a+', newline='') as csvfile: #Check is tempList is empty if not os.path.getsize('./xxx.csv'):", "[[0 for col in range(19)] for row in range(APlength)] row = 0 for", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517):", "1.Requirements: Python 3, Flask and relevant packages 2. How does this work? (1)", "csv.reader(csvfile) APs = [row[0] for row in reader] APlength = len(APs) lists =", "RSS = [row for row in reader] with open('tempList.csv', 'w', newline='') as csvfile:", "in range(APlength)] row = 0 for AP in APs: lists[row][0] = AP lists[row][1]", "own environment. 
(2) Then run this python file, A temporary file called 'tempList.csv'", "for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS", "'GeoZ', 'Model', 'Time']) for row in range(1,517): RSS[row][2] = Building RSS[row][3] = Floor", "u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one", "spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='') as csvfile: reader", "u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',',", "as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level',", "lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18] = 'none' row += 1 with", "to the unchanged file 'APs.csv' (to store the AP info in a defined", "lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y,", "= Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17] =", "in 'xxx.csv'(which stores all info that is similar to database) and be refreshed", "with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS = [row for", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result def addCSV(BSSID, SSID, Building,", "u.Level, u.GeoX, u.GeoY, u.GeoZ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's", "'none' 
lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15] = 'none'", "= 'none' lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16] =", "info in DB into a csv file, without SSID stored, encode mode is", "lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14],", "GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = request.form['Model'] Time", "Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level,", "some SSID contains chinese characters) #edition def addAllCSV(): #whole database with open('APs.csv', 'w',", "'Model', 'Time']) #edition4 for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2],", "db.session.add(u) db.session.commit() #Show all RSS info from database def showAPs(num): ap = models.User.query.get(num)", "data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9],", "debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into database whose name is app.db", "field value would be none) with order according to the unchanged file 'APs.csv'", "stores all info that is similar to database) and be refreshed in 'oneTime.csv'", "quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result def addCSV(BSSID, SSID, Building, Floor, Location_x,", "'OK.' 
if __name__ == \"__main__\": #Use local host IP for local server #Or", "= list[row][4], Location_y = list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY =", "= 'none' lists[row][12] = 'none' lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15] =", "DB into a csv file, without SSID stored, encode mode is UTF-8 (as", "= len(APs) lists = [[0 for col in range(19)] for row in range(APlength)]", "After 60 times (AP number), the function 'refreshCSV()' would be called. Then scan", "RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18]", "in range(0,517): if AP == list[row][0]: return row return 'none' def tempList(BSSID,SSID, Building,", "value for next time's transmission. ############################################################################################ ''' # coding: utf-8 from flask import", "'none' row += 1 #edition2 with open('tempList.csv', 'a+', newline='') as csvfile: #Check is", "ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14] = GeoX", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in", "in users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY,", "if not os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz',", "would be none) with order according to the unchanged file 'APs.csv' (to store", "ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all() for u in users:", "= 
Flask(__name__) #edition # Write all info in DB into a csv file,", "Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17] = Model x = ([", "RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty():", "with open('tempList.csv', 'a+', newline='') as csvfile: #Check is tempList is empty if not", "RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile: if not", "__name__ == \"__main__\": #Use local host IP for local server #Or IPV4 address", "RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15],", "lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ])", "request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx']", "means info of one scan has all been sent), the 'tempList.csv' would be", "ORIy RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15] = GeoY", "SSID = list[row][1], Building = list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y", "############################################################################################ It's a light server based on FLASK micro framework, 1.Requirements: Python 3,", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time ]) spamwriter =", "= request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX,", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x,", "initialized with default data (e.g. 
for signal level RSS it would be -110,", "times and one symbol called \"Done\" would be set to '1' for last", "lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15],", "with default data (e.g. for signal level RSS it would be -110, magnetic", "run this python file, A temporary file called 'tempList.csv' will be initialized with", "micro framework, 1.Requirements: Python 3, Flask and relevant packages 2. How does this", "#edition # Write all info in DB into a csv file, without SSID", "= 'none' lists[row][13] = '-110' lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16] =", "RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+', newline='') as csvfile: #Check", "= AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13] =", "would be refreshed with one line of AP's info. After 60 times (AP", "def checkAP(list, AP): row = 0 for row in range(0,517): if AP ==", "'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS = [row for row in", "Flask and relevant packages 2. How does this work? 
(1) Firstly, modify the", "GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'):", "= ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy,", "= ([ BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='') as csvfile: reader =", "= Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency RSS[row][7] =", "from sklearn.preprocessing import scale import matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode", "the input AP's BSSID is in the mapping.csv, which contains 200 APs def", "= SSID RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] =", "default value for next time's transmission. 
############################################################################################ ''' # coding: utf-8 from flask", "= AP lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4] =", "app import db, models import csv import os #to get current path import", "BSSID is in the mapping.csv, which contains 200 APs def checkAP(list, AP): row", "lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building,", "RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with", "= models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy,", "GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r', newline='') as csvfile: reader =", "addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "= request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = request.form['Model'] Time = request.form['Time'] SSID =", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time'])", "== '1': refreshCSV(Building, Floor, Location_x, Location_y, Model) initializeTempList() print('1') else: print('0') return 'OK.'", "lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) print(i) spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model):", "GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info from database def showAPs(num):", "spamwriter.writerow(data) def initializeTempList(): with 
open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs", "host IP address of your own environment. (2) Then run this python file,", "def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i", "in APs: lists[row][0] = AP lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3] =", "Model, Time): with open('userinput.csv', 'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter =", "'Model','Time']) data = ([ BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY,", "RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17],", "= ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10],", "60 APs that is detected once, then the transmission would be repeated 60", "Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level) if Done == '1': refreshCSV(Building,", "ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all", "similar to database) and be refreshed in 'oneTime.csv' (for check last time's scan", "work? (1) Firstly, modify the host IP address of your own environment. 
(2)", "RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as", "[row for row in reader] with open('tempList.csv', 'w', newline='') as csvfile: spamwriter =", "lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13],", "ORIy = list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u)", "AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy", "request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ']", "spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517):", "RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17]", "lists[row][1] = 'none' lists[row][2] = 'none' lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5]", "= csv.reader(csvfile) RSS = [row for row in reader] #print(RSS,RSS[0][0]) for row in", "= ORIz RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15] = GeoY RSS[row][16] =", "Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ,", "(1) Firstly, modify the host IP address of your own environment. (2) Then", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for row", "called 'tempList.csv' will be initialized with default data (e.g. 
for signal level RSS", "is in the mapping.csv, which contains 200 APs def checkAP(list, AP): row =", "symbol called \"Done\" would be set to '1' for last time, which means", "row in range(1,517): RSS[row][2] = Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5]", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x,", "'Time']) #edition4 for i in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3],", "ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y,", "Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY,", "request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "AccX = list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy", "request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building,", "'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14] = '-110'", "GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building,", "= Location_x RSS[row][5] = Location_y RSS[row][17] = Model x = ([ RSS[row][0], RSS[row][1],", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency", "'a', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in", "= request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model =", "contains 200 APs def checkAP(list, AP): row = 0 for row in 
range(0,517):", "the function 'refreshCSV()' would be called. Then scan info of once would be", "csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ',", "lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12] = 'none' lists[row][13]", "in DB into a csv file, without SSID stored, encode mode is UTF-8", "RSS info from database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x,", "line of AP's info. After 60 times (AP number), the function 'refreshCSV()' would", "csv.reader(csvfile) RSS = [row for row in reader] with open('tempList.csv', 'w', newline='') as", "established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with", "= request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level'] AccX =", "csvfile: #Check is tempList is empty if not os.path.getsize('./tempList.csv'): #file is empty spamwriter", "server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX,", "Finally, refresh 'tempList.csv' with default value for next time's transmission. 
############################################################################################ ''' #", "RSS[row][15] = GeoY RSS[row][16] = GeoZ RSS[row][17] = Model RSS[row][18] = Time with", "utf-8 encode mode # create the application object app = Flask(__name__) #edition #", "spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3", "request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency']", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID,", "= request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY =", "= request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ'] Model = request.form['Model'] Time =", "Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info from", "Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time)", "#file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time'])", "RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data)", "AP's BSSID is in the mapping.csv, which contains 200 APs def checkAP(list, AP):", "your own environment. 
(2) Then run this python file, A temporary file called", "= Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17] = Model x =", "models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz,", "* #algorithm part import pandas as pdb import numpy as np import tensorflow", "Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY,", "lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the input AP's BSSID is in the", "RSS[row][5] = Location_y RSS[row][17] = Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3],", "ap.GeoY, ap.GeoZ) def deleteDB(): users = models.User.query.all() for u in users: db.session.delete(u) db.session.commit()", "with open('xxx.csv', 'a+', newline='') as csvfile: #Check is tempList is empty if not", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID =", "lists[row][14] = '-110' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 'none' lists[row][18]", "'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data =", "= ORIy RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15] =", "showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ,", "#Use local host IP for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx',", "os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 
'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ',", "ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB(): users =", "as csvfile: reader = csv.reader(csvfile) RSS = [row for row in reader] #print(RSS,RSS[0][0])", "in the mapping.csv, which contains 200 APs def checkAP(list, AP): row = 0", "transmission would be repeated 60 times and one symbol called \"Done\" would be", "matplotlib.pyplot as plt PYTHONIOENCODING=\"UTF-8\" #set the utf-8 encode mode # create the application", "as np import tensorflow as tf from sklearn.preprocessing import scale import matplotlib.pyplot as", "delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2],", "RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14],", "with default value for next time's transmission. 
############################################################################################ ''' # coding: utf-8 from", "tempList(BSSID, SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level,", "is app.db def addAPs(list): for row in range(0,517): u = models.User(BSSID = list[row][0],", "Building = list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency", "'none' lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14] = '-110' lists[row][15] = 'none'", "# Write all info in DB into a csv file, without SSID stored,", "value would be none) with order according to the unchanged file 'APs.csv' (to", "Location_x, Location_y, BSSID, Frequency, Level) if Done == '1': refreshCSV(Building, Floor, Location_x, Location_y,", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with", "Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY,", "reader = csv.reader(csvfile) APs = [row[0] for row in reader] APlength = len(APs)", "GeoX, GeoY, GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency,", "'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,", "= 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14] =", "= ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14] =", "Model = request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX'] AccY", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID", "Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8],", 
"reader = csv.reader(csvfile) RSS = [row for row in reader] with open('tempList.csv', 'w',", "AP info in a defined order) Each time one complete info of AP", "x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9],", "#edition5 isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor'] Location_x =", "([ BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy,", "= list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy =", "RSS[row][11] = ORIy RSS[row][12] = ORIz RSS[row][13] = Level RSS[row][14] = GeoX RSS[row][15]", "open('userinput.csv', 'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "not os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY',", "lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7] = 'none' lists[row][8]", "-110, magnetic field value would be none) with order according to the unchanged", "into a csv file, without SSID stored, encode mode is UTF-8 (as some", "range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8],", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r', newline='') as csvfile: reader", "def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY,", "0 for AP in APs: lists[row][0] = AP lists[row][1] = 'none' lists[row][2] =", 
"list[row][2], Floor = list[row][3], Location_x = list[row][4], Location_y = list[row][5], Frequency = list[row][6],", "print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level,", "]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r', newline='')", "GeoX, GeoY, GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def", "RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/',", "a defined order) Each time one complete info of AP arrival, (assume there", "data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9],", "from 'tempList.csv' and be added in 'xxx.csv'(which stores all info that is similar", "list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ = list[row][9],", "AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(list)", "Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level,", "row in reader] APlength = len(APs) lists = [[0 for col in range(19)]", "lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18]", "open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x',", "RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='') as csvfile: spamwriter =", "times (AP 
number), the function 'refreshCSV()' would be called. Then scan info of", "as csvfile: #Check is tempList is empty if not os.path.getsize('./xxx.csv'): #file not established", "in reader] #print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0] == BSSID: RSS[row][1] =", "= request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx,", "ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor,", "np import tensorflow as tf from sklearn.preprocessing import scale import matplotlib.pyplot as plt", "= list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15],", "'none' row += 1 with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile,", "'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y',", "RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ])", "[row[0] for row in reader] APlength = len(APs) lists = [[0 for col", "with one line of AP's info. After 60 times (AP number), the function", "Time): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) RSS = [row", "= request.form['AccX'] AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy =", "#whole database with open('APs.csv', 'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter =", "GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "transmission. 
############################################################################################ ''' # coding: utf-8 from flask import Flask, request from app", "lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check if the input AP's", "'none' lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7] = 'none'", "RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10]", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for", "#Check is tempList is empty if not os.path.getsize('./xxx.csv'): #file not established spamwriter =", "'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data =", "is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time'])", "UTF-8 (as some SSID contains chinese characters) #edition def addAllCSV(): #whole database with", "will be initialized with default data (e.g. 
for signal level RSS it would", "all been sent), the 'tempList.csv' would be refreshed with one line of AP's", "magnetic field value would be none) with order according to the unchanged file", "with order according to the unchanged file 'APs.csv' (to store the AP info", "'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][12] = 'none'", "RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post():", "repeated 60 times and one symbol called \"Done\" would be set to '1'", "ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx,", "'r', newline='') as csvfile: reader = csv.reader(csvfile) APs = [row[0] for row in", "for u in users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency,", "############################################################################################ ''' # coding: utf-8 from flask import Flask, request from app import", "one complete info of AP arrival, (assume there are 60 APs that is", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency,", "not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time'])", "SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level) if Done ==", "Then scan info of once would be be copied from 'tempList.csv' and be", "list[row][10], ORIy = list[row][11], ORIz = list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16])", "empty if not os.path.getsize('./tempList.csv'): #file is 
empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y',", "csv.reader(csvfile) RSS = [row for row in reader] #print(RSS,RSS[0][0]) for row in range(1,517):", "check last time's scan info). Finally, refresh 'tempList.csv' with default value for next", "Location_x = request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level'] AccX", "'APs.csv' (to store the AP info in a defined order) Each time one", "Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y, BSSID, Frequency, Level) if", "#edition def addAllCSV(): #whole database with open('APs.csv', 'w', newline='') as csvfile: if not", "request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz']", "would be set to '1' for last time, which means info of one", "Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model,", "= AccX RSS[row][8] = AccY RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11] =", "'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([ RSS[i][0], RSS[i][1],", "detected once, then the transmission would be repeated 60 times and one symbol", "Write all info in DB into a csv file, without SSID stored, encode", "Location_y RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8] = AccY RSS[row][9] = AccZ", "Location_y RSS[row][17] = Model x = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5],", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r', newline='') as", "+= 1 with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE)", "RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], 
RSS[row][17], RSS[row][18] ]) spamwriter = csv.writer(csvfile,", "data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx,", "request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ = request.form['GeoZ']", "numpy as np import tensorflow as tf from sklearn.preprocessing import scale import matplotlib.pyplot", "row in reader] #print(RSS,RSS[0][0]) for row in range(1,517): if RSS[row][0] == BSSID: RSS[row][1]", "row = 0 for row in range(0,517): if AP == list[row][0]: return row", "'a+', newline='') as csvfile: #Check is tempList is empty if not os.path.getsize('./xxx.csv'): #file", "Location_x, Location_y, Model) initializeTempList() print('1') else: print('0') return 'OK.' if __name__ == \"__main__\":", "would be be copied from 'tempList.csv' and be added in 'xxx.csv'(which stores all", "one line of AP's info. After 60 times (AP number), the function 'refreshCSV()'", "RSS[row][17] = Model RSS[row][18] = Time with open('tempList.csv', 'w', newline='') as csvfile: spamwriter", "request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID'] AccX = request.form['AccX'] AccY = request.form['AccY']", "'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data = ([ lists[i][0], lists[i][1],", "spamwriter.writerow(data) def refreshCSV(Building, Floor, Location_x, Location_y, Model): with open('tempList.csv', 'r', newline='') as csvfile:", "'Model', 'Time']) for i in range(1,517): data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3],", "GeoZ, Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building,", "reader] with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building',", 
"os.path.getsize('./oneTime.csv'): #file is empty spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ',", "ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID,", "Zhenghang(<NAME> ############################################################################################ It's a light server based on FLASK micro framework, 1.Requirements: Python", "1 #edition2 with open('tempList.csv', 'a+', newline='') as csvfile: #Check is tempList is empty", "database def showAPs(num): ap = models.User.query.get(num) print(ap.BSSID, ap.SSID, ap.Building, ap.Floor,ap.Location_x, ap.Location_y, ap.Frequency, ap.AccX,", "def post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building'] Floor =", "the application object app = Flask(__name__) #edition # Write all info in DB", "AP arrival, (assume there are 60 APs that is detected once, then the", "lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11] = 'none' lists[row][13] = 'none' lists[row][14]", "data = ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9],", "if __name__ == \"__main__\": #Use local host IP for local server #Or IPV4", "GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile)", "lists = [[0 for col in range(19)] for row in range(APlength)] row =", "u in users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX,", "print('1') else: print('0') return 'OK.' 
if __name__ == \"__main__\": #Use local host IP", "app = Flask(__name__) #edition # Write all info in DB into a csv", "= 'none' lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] =", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX,", "initializeTempList(): with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile) APs = [row[0]", "#addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room, Location_x, Location_y,", "RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6] = Frequency RSS[row][7] = AccX RSS[row][8]", "IP for local server #Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add", "which contains 200 APs def checkAP(list, AP): row = 0 for row in", "in range(19)] for row in range(APlength)] row = 0 for AP in APs:", "= request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY = request.form['GeoY'] GeoZ =", "RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty(): with open('xxx.csv', 'a+',", "GeoY, GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList():", "data = ([ BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ,", "this python file, A temporary file called 'tempList.csv' will be initialized with default", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r', newline='') as csvfile: reader =", "ORIz = request.form['ORIz'] Done = request.form['Done'] #addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency,", "if the input AP's BSSID is in the mapping.csv, which contains 200 APs", "#algorithm part import pandas as pdb 
import numpy as np import tensorflow as", "input AP's BSSID is in the mapping.csv, which contains 200 APs def checkAP(list,", "be be copied from 'tempList.csv' and be added in 'xxx.csv'(which stores all info", "RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data)", "u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ, u.ORIx, u.ORIy, u.ORIz, u.Level, u.GeoX, u.GeoY,", "'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for i in range(0,517): data = ([ lists[i][0], lists[i][1], lists[i][2],", "spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i in range(1,517): data = ([ lists[i][0],", "RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile:", "RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(x) with open('xxx.csv', 'a', newline='')", "the transmission would be repeated 60 times and one symbol called \"Done\" would", "Model, Time) #addAPs(list) #addAllCSV() #addAPs(Building, Room, Location_x, Location_y, SSID,BSSID, Frequency, Level) #addCSV(Building, Room,", "spamwriter.writerow([ 'BSSID','SSID','Building', 'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in users: data =", "csvfile: reader = csv.reader(csvfile) RSS = [row for row in reader] #print(RSS,RSS[0][0]) for", "RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break def isEmpty(): with", "lists[row][14] = 'none' lists[row][15] = 'none' lists[row][16] = 'none' lists[row][17] = 
'none' lists[row][18]", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data = ([ RSS[row][0],", "1 with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID',", "list[row][12], Level = list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info", "RSS[i][9], RSS[i][10], RSS[i][11], RSS[i][12], RSS[i][13], RSS[i][14], RSS[i][15], RSS[i][16], RSS[i][17], RSS[i][18] ]) spamwriter.writerow(data) break", "defined order) Each time one complete info of AP arrival, (assume there are", "ap.Frequency, ap.AccX, ap.AccY, ap.AccZ, ap.ORIx, ap.ORIy, ap.ORIz, ap.Level, ap.GeoX, ap.GeoY, ap.GeoZ) def deleteDB():", "= ([ lists[i][0], lists[i][1], lists[i][2], lists[i][3], lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10],", "= list[row][13], GeoX=list[row][14], GeoY=list[row][15], GeoZ=list[row][16]) db.session.add(u) db.session.commit() #Show all RSS info from database", "in range(1,517): data = ([ RSS[i][0], RSS[i][1], RSS[i][2], RSS[i][3], RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7],", "AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz = request.form['ORIz'] Done", "in range(1,517): data = ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7],", "#Or IPV4 address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into database", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID', 'Building','Floor','Location_x','Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) for i", "csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) #edition3 for row in range(1,517): data 
=", "request.form['Floor'] Location_x = request.form['Location_x'] Location_y = request.form['Location_y'] Frequency = request.form['Frequency'] Level = request.form['Level']", "info. After 60 times (AP number), the function 'refreshCSV()' would be called. Then", "Time) #addAPs(BSSID, Building, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level,", "= csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ', 'ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) for", "app.db def addAPs(list): for row in range(0,517): u = models.User(BSSID = list[row][0], SSID", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x','Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz', 'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='')", "lists[row][17] = 'none' lists[row][18] = 'none' row += 1 #edition2 with open('tempList.csv', 'a+',", "would be called. 
Then scan info of once would be be copied from", "'Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level','GeoX','GeoY','GeoZ']) users = models.User.query.all() for u in users: data = ([u.BSSID, u.SSID,", "one time's scanner result def addCSV(BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX,", "isEmpty() BSSID = request.form['BSSID'] Building = request.form['Building'] Floor = request.form['Floor'] Location_x = request.form['Location_x']", "= Building RSS[row][3] = Floor RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][6] =", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building','Floor','Location_x', 'Location_y','Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i in range(1,517): data", "is tempList is empty if not os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile,", "that is detected once, then the transmission would be repeated 60 times and", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('userinput.csv', 'a', newline='') as", "lists[i][4], lists[i][5], lists[i][6], lists[i][7], lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16],", "RSS[row][4] = Location_x RSS[row][5] = Location_y RSS[row][17] = Model x = ([ RSS[row][0],", "users: data = ([u.BSSID, u.SSID, u.Buidling, u.Floor, u.Location_x, u.Location_y, u.Frequency, u.AccX, u.AccY, u.AccZ,", "Level = request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX = request.form['GeoX'] GeoY", "BSSID, SSID, Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz,", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) #add one time's scanner result def addCSV(BSSID, SSID, Building, Floor,", "AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time ]) spamwriter", 
"with open('tempList.csv', 'w', newline='') as csvfile: spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x',", "RSS[row][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16], RSS[row][17], RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='')", "list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ = list[row][9], ORIx = list[row][10],", "be copied from 'tempList.csv' and be added in 'xxx.csv'(which stores all info that", "(to store the AP info in a defined order) Each time one complete", "AccY RSS[row][9] = AccZ RSS[row][10] = ORIx RSS[row][11] = ORIy RSS[row][12] = ORIz", "#refreshCSV(SSID,Building, Floor, Location_x, Location_y, Frequency, AccX, AccY, AccZ, ORIx, ORIy, ORIz, Level, GeoX,", "= list[row][8], AccZ = list[row][9], ORIx = list[row][10], ORIy = list[row][11], ORIz =", "complete info of AP arrival, (assume there are 60 APs that is detected", "(e.g. 
for signal level RSS it would be -110, magnetic field value would", "]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file is", "name is app.db def addAPs(list): for row in range(0,517): u = models.User(BSSID =", "lists[row][3] = 'none' lists[row][4] = 'none' lists[row][5] = 'none' lists[row][6] = 'none' lists[row][7]", "lists[row][7] = 'none' lists[row][8] = 'none' lists[row][9] = 'none' lists[row][10] = 'none' lists[row][11]", "range(19)] for row in range(APlength)] row = 0 for AP in APs: lists[row][0]", "the unchanged file 'APs.csv' (to store the AP info in a defined order)", "to '1' for last time, which means info of one scan has all", "ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time ]) spamwriter = csv.writer(csvfile, delimiter=',',", "= request.form['Frequency'] Level = request.form['Level'] AccX = request.form['AccX'] AccY = request.form['AccY'] GeoX =", "encode mode is UTF-8 (as some SSID contains chinese characters) #edition def addAllCSV():", "60 times and one symbol called \"Done\" would be set to '1' for", "set to '1' for last time, which means info of one scan has", "next time's transmission. 
############################################################################################ ''' # coding: utf-8 from flask import Flask, request", "([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10], RSS[row][11],", "RSS[row][18] ]) spamwriter.writerow(data) with open('oneTime.csv', 'a', newline='') as csvfile: if not os.path.getsize('./oneTime.csv'): #file", "'Level', 'GeoX','GeoY','GeoZ', 'Model', 'Time']) with open('mapping.csv', 'r', newline='') as csvfile: reader = csv.reader(csvfile)", "ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #addCSV(BSSID, Building, Location_x, Location_y, Frequency, AccX,", "AccY = request.form['AccY'] AccZ = request.form['AccZ'] ORIx = request.form['ORIx'] ORIy = request.form['ORIy'] ORIz", "lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data) #Check", "there are 60 APs that is detected once, then the transmission would be", "spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) def initializeTempList(): with open('mapping.csv', 'r', newline='') as", "= request.form['GeoZ'] Model = request.form['Model'] Time = request.form['Time'] SSID = request.form['SSID'] AccX =", "debug=True) ''' #Add RSS info into database whose name is app.db def addAPs(list):", "csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID','SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY','AccZ','ORIx','ORIy','ORIz','Level', 'GeoX', 'GeoY', 'GeoZ', 'Model', 'Time']) for", "the 'tempList.csv' would be refreshed with one line of AP's info. 
After 60", "quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID', 'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID,", "path import importlib from model import * #algorithm part import pandas as pdb", "address #app.run(host='192.168.xxx.xxx', debug=True) app.run(host='192.168.xxx.xxx', debug=True) ''' #Add RSS info into database whose name", "'GeoX','GeoY', 'GeoZ', 'Model', 'Time']) #edition4 for i in range(1,517): data = ([ RSS[i][0],", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time): with open('tempList.csv', 'r',", "import os #to get current path import importlib from model import * #algorithm", "RSS[i][4], RSS[i][5], RSS[i][6], RSS[i][7], RSS[i][8], RSS[row][9], RSS[row][10], RSS[row][11], RSS[i][12], RSS[row][13], RSS[row][14], RSS[row][15], RSS[row][16],", "= list[row][5], Frequency = list[row][6], AccX = list[row][7], AccY = list[row][8], AccZ =", "'a', newline='') as csvfile: if not os.path.getsize('./userinput.csv'): spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(['BSSID',", "lists[i][8], lists[i][9], lists[i][10], lists[i][11], lists[i][12], lists[i][13], lists[i][14], lists[i][15], lists[i][16], lists[i][17], lists[i][18] ]) spamwriter.writerow(data)", "database with open('APs.csv', 'w', newline='') as csvfile: if not os.path.getsize('./APs.csv'): spamwriter = csv.writer(csvfile,", "AccZ, ORIx, ORIy, ORIz, Level, GeoX, GeoY, GeoZ, Model, Time) #refreshCSV(SSID,Building, Floor, Location_x,", "info of one scan has all been sent), the 'tempList.csv' would be refreshed", "delimiter=',', quoting=csv.QUOTE_NONE) spamwriter.writerow(data) @app.route('/', methods=['POST']) def post(): #isEmpty() #edition5 isEmpty() BSSID = request.form['BSSID']", "empty if not os.path.getsize('./xxx.csv'): #file not established spamwriter = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_NONE) 
spamwriter.writerow(['BSSID','SSID','Building',", "= ([ RSS[row][0], RSS[row][1], RSS[row][2], RSS[row][3], RSS[row][4], RSS[row][5], RSS[row][6], RSS[row][7], RSS[row][8], RSS[row][9], RSS[row][10],", "'SSID','Building', 'Floor','Location_x', 'Location_y', 'Frequency','AccX','AccY', 'AccZ','ORIx','ORIy','ORIz','Level', 'GeoX','GeoY','GeoZ', 'Model','Time']) data = ([ BSSID, SSID, Building," ]
[ "= text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self): data = {'text': self.toPlainText()}", "import QPlainTextEdit from PySide2.QtCore import Qt # from PySide2.QtGui import ... class %CLASS%(QPlainTextEdit,", "self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text = text", "self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if event.key() == Qt.Key_Enter or", "keyPressEvent(self, event): if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text =", "self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText() if", "event) def keyPressEvent(self, event): if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText())", "QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event): txt =", "or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self,", "txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if event.key() == Qt.Key_Enter or event.key() ==", "MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text =", "self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if event.key() == Qt.Key_Enter", "from 
PySide2.QtCore import Qt # from PySide2.QtGui import ... class %CLASS%(QPlainTextEdit, MWB): def", "Qt # from PySide2.QtGui import ... class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self,", "= self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def", "get_text(self): return self.toPlainText() def get_data(self): data = {'text': self.toPlainText()} return data def set_data(self,", "import ... class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250,", "editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self): data", "get_data(self): data = {'text': self.toPlainText()} return data def set_data(self, data): self.setPlainText(data['text']) def remove_event(self):", "# from PySide2.QtGui import ... 
class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params)", "class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.')", "def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText()", "self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self):", "self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text", "event): txt = self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self,", "MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event):", "self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self): data = {'text':", "import * from PySide2.QtWidgets import QPlainTextEdit from PySide2.QtCore import Qt # from PySide2.QtGui", "params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event): txt", "self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText() if txt !=", "if txt != self.last_text: 
self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event):", "self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update()", "self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self,", "self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self): data = {'text': self.toPlainText()} return data", "QPlainTextEdit from PySide2.QtCore import Qt # from PySide2.QtGui import ... class %CLASS%(QPlainTextEdit, MWB):", "else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return", "params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self,", "PySide2.QtWidgets import QPlainTextEdit from PySide2.QtCore import Qt # from PySide2.QtGui import ... class", "event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self,", "event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text):", "PySide2.QtCore import Qt # from PySide2.QtGui import ... 
class %CLASS%(QPlainTextEdit, MWB): def __init__(self,", "txt = self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event)", "= txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if event.key() == Qt.Key_Enter or event.key()", "self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if event.key() ==", "= self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def", "self.last_text = self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText() if txt != self.last_text:", "from NIWENV import * from PySide2.QtWidgets import QPlainTextEdit from PySide2.QtCore import Qt #", "= self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText() if txt != self.last_text: self.editing_finished(txt)", "%CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text", "focusOutEvent(self, event): txt = self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text = txt", "def get_data(self): data = {'text': self.toPlainText()} return data def set_data(self, data): self.setPlainText(data['text']) def", "import Qt # from PySide2.QtGui import ... class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params):", "PySide2.QtGui import ... 
class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet())", "* from PySide2.QtWidgets import QPlainTextEdit from PySide2.QtCore import Qt # from PySide2.QtGui import", "return self.toPlainText() def get_data(self): data = {'text': self.toPlainText()} return data def set_data(self, data):", "__init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def", "!= self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if event.key()", "== Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event)", "self.toPlainText() def get_data(self): data = {'text': self.toPlainText()} return data def set_data(self, data): self.setPlainText(data['text'])", "txt != self.last_text: self.editing_finished(txt) self.last_text = txt QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, event): if", "30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText() if txt", "def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self):", "data = {'text': self.toPlainText()} return data def set_data(self, data): self.setPlainText(data['text']) def remove_event(self): pass", "Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def", "QPlainTextEdit.focusOutEvent(self, event) def keyPressEvent(self, 
event): if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return:", "def keyPressEvent(self, event): if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text", "NIWENV import * from PySide2.QtWidgets import QPlainTextEdit from PySide2.QtCore import Qt # from", "self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30) self.setPlainText('obj.') self.last_text = self.toPlainText() def focusOutEvent(self, event): txt = self.toPlainText()", "event): if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText()", "if event.key() == Qt.Key_Enter or event.key() == Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else:", "event) def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def", "from PySide2.QtGui import ... class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self)", "Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text =", "def focusOutEvent(self, event): txt = self.toPlainText() if txt != self.last_text: self.editing_finished(txt) self.last_text =", "QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return self.toPlainText()", "text): self.parent_node_instance.text = text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self): data =", "... 
class %CLASS%(QPlainTextEdit, MWB): def __init__(self, params): MWB.__init__(self, params) QPlainTextEdit.__init__(self) self.setStyleSheet(self.parent_node_instance.session_stylesheet()) self.setFixedSize(250, 30)", "== Qt.Key_Return: self.editing_finished(self.toPlainText()) self.last_text = self.toPlainText() else: QPlainTextEdit.keyPressEvent(self, event) def editing_finished(self, text): self.parent_node_instance.text", "def get_text(self): return self.toPlainText() def get_data(self): data = {'text': self.toPlainText()} return data def", "from PySide2.QtWidgets import QPlainTextEdit from PySide2.QtCore import Qt # from PySide2.QtGui import ...", "text self.parent_node_instance.update() def get_text(self): return self.toPlainText() def get_data(self): data = {'text': self.toPlainText()} return" ]
[ "<gh_stars>0 print \"\\n Welcome to the Model Major Generator. This program gives you", "Major Generator. This program gives you an image to help you memorise any", "the number you'd like to memorise \\n\") while len(the_input) > 1: next_number =", "= \"\" the_input = raw_input(\"Enter the number you'd like to memorise \\n\") while", "help you memorise any number \\n\" def create_mnemonic(): import pegs mnemonic = \"\"", "int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \" \" + next_image", "the_input[2:] mnemonic += \" \" + next_image if len(the_input) == 1: next_number =", "gives you an image to help you memorise any number \\n\" def create_mnemonic():", "next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \" + next_image else: pass print mnemonic", "to memorise \\n\") while len(the_input) > 1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number]", "next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \" \"", "while len(the_input) > 1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:]", "\\n\") while len(the_input) > 1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input =", "number you'd like to memorise \\n\") while len(the_input) > 1: next_number = int(the_input[0:2])", "= pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \" \" + next_image if len(the_input)", "len(the_input) == 1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \"", "next_image if len(the_input) == 1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic +=", "= int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \" \" +", "create_mnemonic(): import pegs mnemonic = \"\" the_input = raw_input(\"Enter the 
number you'd like", "Welcome to the Model Major Generator. This program gives you an image to", "image to help you memorise any number \\n\" def create_mnemonic(): import pegs mnemonic", "\\n\" def create_mnemonic(): import pegs mnemonic = \"\" the_input = raw_input(\"Enter the number", "Generator. This program gives you an image to help you memorise any number", "def create_mnemonic(): import pegs mnemonic = \"\" the_input = raw_input(\"Enter the number you'd", "This program gives you an image to help you memorise any number \\n\"", "pegs mnemonic = \"\" the_input = raw_input(\"Enter the number you'd like to memorise", "mnemonic += \" \" + next_image if len(the_input) == 1: next_number = int(the_input[0:1])", "the_input = raw_input(\"Enter the number you'd like to memorise \\n\") while len(the_input) >", "the_input = the_input[2:] mnemonic += \" \" + next_image if len(the_input) == 1:", "memorise any number \\n\" def create_mnemonic(): import pegs mnemonic = \"\" the_input =", "next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \" \" + next_image if", "+= \" \" + next_image if len(the_input) == 1: next_number = int(the_input[0:1]) next_image", "mnemonic = \"\" the_input = raw_input(\"Enter the number you'd like to memorise \\n\")", "you'd like to memorise \\n\") while len(the_input) > 1: next_number = int(the_input[0:2]) next_image", "like to memorise \\n\") while len(the_input) > 1: next_number = int(the_input[0:2]) next_image =", "an image to help you memorise any number \\n\" def create_mnemonic(): import pegs", "next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \" + next_image else:", "\"\\n Welcome to the Model Major Generator. 
This program gives you an image", "\"\" the_input = raw_input(\"Enter the number you'd like to memorise \\n\") while len(the_input)", "any number \\n\" def create_mnemonic(): import pegs mnemonic = \"\" the_input = raw_input(\"Enter", "= raw_input(\"Enter the number you'd like to memorise \\n\") while len(the_input) > 1:", "= the_input[2:] mnemonic += \" \" + next_image if len(the_input) == 1: next_number", "+ next_image if len(the_input) == 1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic", "print \"\\n Welcome to the Model Major Generator. This program gives you an", "import pegs mnemonic = \"\" the_input = raw_input(\"Enter the number you'd like to", "\" \" + next_image if len(the_input) == 1: next_number = int(the_input[0:1]) next_image =", "= int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \" + next_image else: pass", "len(the_input) > 1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic", "the Model Major Generator. This program gives you an image to help you", "if len(the_input) == 1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \"", "> 1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic +=", "1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \" + next_image", "= pegs.single_digit_peg_list[next_number] mnemonic += \" \" + next_image else: pass print mnemonic create_mnemonic()", "to the Model Major Generator. This program gives you an image to help", "Model Major Generator. 
This program gives you an image to help you memorise", "raw_input(\"Enter the number you'd like to memorise \\n\") while len(the_input) > 1: next_number", "\" + next_image if len(the_input) == 1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number]", "== 1: next_number = int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \" +", "you an image to help you memorise any number \\n\" def create_mnemonic(): import", "you memorise any number \\n\" def create_mnemonic(): import pegs mnemonic = \"\" the_input", "memorise \\n\") while len(the_input) > 1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input", "to help you memorise any number \\n\" def create_mnemonic(): import pegs mnemonic =", "pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \" \" + next_image if len(the_input) ==", "program gives you an image to help you memorise any number \\n\" def", "int(the_input[0:1]) next_image = pegs.single_digit_peg_list[next_number] mnemonic += \" \" + next_image else: pass print", "number \\n\" def create_mnemonic(): import pegs mnemonic = \"\" the_input = raw_input(\"Enter the", "1: next_number = int(the_input[0:2]) next_image = pegs.pegs_list[next_number] the_input = the_input[2:] mnemonic += \"" ]
[ "None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove()", "KIND, either express or implied. # See the License for the specific language", "in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require composite index query = db.Query(Item)", "Unless required by applicable law or agreed to in writing, software # distributed", "2) query.order('-z') for _ in query.fetch(1): pass expected = \"\"\"indexes: - kind: Item", "query.fetch(1): pass expected = \"\"\"indexes: - kind: Item properties: - name: x -", "return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity')", "self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3],", "def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) if __name__ == '__main__':", "'121', '120'], query) def testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >',", "in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3)", "query) def testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140',", "from google.appengine.ext import db # A query limit large enough to return all", "tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual(", "_ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def", "composite_query.ClearIndexYaml() self.assertSetEqual(set(), 
composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) if", "this file except in compliance with the License. # You may obtain a", "tests import test_util from google.appengine.api import datastore_errors from google.appengine.ext import db # A", "global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global", "the License. \"\"\"Unit tests for composite_query.py.\"\"\" import unittest from __mimic import common from", "- kind: Item properties: - name: x - name: y - name: z", "to use ancestor queries for strong # consistency in the High Replication Datastore.", "for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require composite index query", "def testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([], query)", "'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo',", "composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def", "self.CheckQuery(['130', '131'], query) # remove patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch,", "ANY KIND, either express or implied. # See the License for the specific", "High Replication Datastore. 
We initialize this global # variable in setUp after calling", "query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self):", "1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query) def testNonEqFilter(self):", "k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =',", "= db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121',", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "= Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some data for x in range(5):", "db.IntegerProperty() z = db.IntegerProperty() # Having a root entity key allows us to", "query.filter('x =', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query)", "self.CheckQuery(['120', '121', '122', '123', '124'], query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x =',", "'120'], query) def testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 3)", "'143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x", "of index definitions.\"\"\" def setUp(self): # always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "x = db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty() # Having a root", "expected = \"\"\"indexes: - kind: Item properties: - name: x - name: y", "add some data for x in range(5): for y in range(5): for z", "for z in range(5): name = '%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY,", "# consistency in the High Replication Datastore. 
We initialize this global # variable", "= db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'],", "setUp(self): # always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar'])", "OF ANY KIND, either express or implied. # See the License for the", "datastore_errors from google.appengine.ext import db # A query limit large enough to return", "query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query) def testNonEqFilter(self): query", "class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that maintain a set of index", "1) query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x", "governing permissions and # limitations under the License. \"\"\"Unit tests for composite_query.py.\"\"\" import", "query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query) # remove patch and", "again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should still work query =", "'122', '123', '124'], query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def", "'144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x =',", "under the License. \"\"\"Unit tests for composite_query.py.\"\"\" import unittest from __mimic import common", "2012 Google Inc. All Rights Reserved. 
# # Licensed under the Apache License,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "=', 1) query.filter('y =', 2) query.order('-z') for _ in query.fetch(1): pass expected =", "query.filter('y =', 2) query.order('-z') for _ in query.fetch(1): pass expected = \"\"\"indexes: -", "def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True", "composite index query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120', '121',", "def setUp(self): # always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo',", "integer properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty() # Having", "_ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some data for x in", "tests for composite_query.py.\"\"\" import unittest from __mimic import common from __mimic import composite_query", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some data for x in range(5): for", "self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual( expected,", "large enough to return all data. 
_BIG_ENOUGH = 100 _MODULE_SETUP = False class", "name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603", "patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "3) self.CheckQuery(['140', '141', '142', '143', '144'], query) def testEmptyResult(self): query = db.Query(Item) query.filter('x", ">', 3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def", "Replication Datastore. We initialize this global # variable in setUp after calling test_util.InitAppHostingApi().", "'141', '142', '143', '144'], query) def testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1)", "keys_only=True) query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name()", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "_BIG_ENOUGH) # simple queries should still work query = db.Query(Item) query.filter('x =', 1)", "testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') for", "= 100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A simple entity with 3 integer", "expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3], [e.key().name()", "required by applicable law or agreed to in writing, software # distributed under", "that maintain a set of index definitions.\"\"\" def setUp(self): # always start with", "self.CheckQuery(['124', '123', '122', '121', '120'], query) def testNonEqFilter(self): query = db.Query(Item) 
query.filter('x =',", "simple entity with 3 integer properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty() z", "applicable law or agreed to in writing, software # distributed under the License", "name: x - name: y - name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def", "composite_query.py.\"\"\" import unittest from __mimic import common from __mimic import composite_query from tests", "def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z')", "import test_util from google.appengine.api import datastore_errors from google.appengine.ext import db # A query", "expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require composite", "or agreed to in writing, software # distributed under the License is distributed", "all data. _BIG_ENOUGH = 100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A simple entity", "3 integer properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty() #", "self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes:", "query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require composite index query = db.Query(Item) query.filter('x", "3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2)", "direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP:", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "# try a slice self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)]) def", "try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should still work", "query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143', '144'], query) def testEmptyResult(self): query =", "for x in range(5): for y in range(5): for z in range(5): name", "3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query) # remove patch and try query", "disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some data for x", "range(5): for z in range(5): name = '%d%d%d' % (x, y, z) Item(key_name=name,", "We initialize this global # variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY =", "=', 1) query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item, keys_only=True)", "Inc. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0", "'143', '144'], query) def testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >',", "the functions that maintain a set of index definitions.\"\"\" def setUp(self): # always", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "_ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some", "writing, software # distributed under the License is distributed on an \"AS IS\"", "query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =',", "# always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes)", "db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item)", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "=', 1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for k", "License. # You may obtain a copy of the License at # #", "Item(db.Model): \"\"\"A simple entity with 3 integer properties.\"\"\" x = db.IntegerProperty() y =", "query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item,", "2) self.CheckQuery(['120', '121', '122', '123', '124'], query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x", "compliance with the License. # You may obtain a copy of the License", "a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']),", "<filename>tests/composite_query_test.py # Copyright 2012 Google Inc. All Rights Reserved. 
# # Licensed under", "test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 #", "composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check", "= db.Query(Item) query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self): query", "known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes())", "CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual( expected, [e.key().name() for", "=', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query) def testNonEqFilter(self): query =", "all self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual(", "should still work query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) def", "composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes())", "query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for", "query = db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142',", "z = db.IntegerProperty() # Having a root entity key allows us to use", "Copyright 2012 Google Inc. All Rights Reserved. 
# # Licensed under the Apache", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "# add some data for x in range(5): for y in range(5): for", "_MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY =", "pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint:", "range(5): for y in range(5): for z in range(5): name = '%d%d%d' %", "False class Item(db.Model): \"\"\"A simple entity with 3 integer properties.\"\"\" x = db.IntegerProperty()", "query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual( expected, [e.key().name() for e in", "return all data. _BIG_ENOUGH = 100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A simple", "not use this file except in compliance with the License. # You may", "x in range(5): for y in range(5): for z in range(5): name =", "simple queries should still work query = db.Query(Item) query.filter('x =', 1) query.filter('y =',", "a slice self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): #", "query) # remove patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) #", "\"\"\"indexes: - kind: Item properties: - name: x - name: y - name:", "y in range(5): for z in range(5): name = '%d%d%d' % (x, y,", "1) query.filter('y =', 2) query.order('-z') for _ in query.fetch(1): pass expected = \"\"\"indexes:", "'144'], query) def testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 10)", "slice self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't", "\"\"\"Unit tests for the functions that maintain a set of index definitions.\"\"\" def", "License, Version 2.0 
(the \"License\"); # you may not use this file except", "setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query):", "db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') for _ in query.fetch(1): pass", "query.filter('x =', 1) query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query) #", "# pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY #", "self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require", "in range(5): for y in range(5): for z in range(5): name = '%d%d%d'", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140', '141',", "def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes())", "query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query) def testNonEqFilter(self): query = db.Query(Item) query.filter('x", "import common from __mimic import composite_query from tests import test_util from google.appengine.api import", "# you may not use this file except in compliance with the License.", "def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected,", "agreed to in writing, software # distributed under the License is distributed on", "global _ROOT_ITEM_KEY # pylint: 
disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 # add", "1) query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query) # remove patch", "= db.IntegerProperty() z = db.IntegerProperty() # Having a root entity key allows us", "= db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query =", "variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self):", "= db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty() # Having a root entity", "(the \"License\"); # you may not use this file except in compliance with", "from __mimic import common from __mimic import composite_query from tests import test_util from", "y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the", "# Unless required by applicable law or agreed to in writing, software #", "class Item(db.Model): \"\"\"A simple entity with 3 integer properties.\"\"\" x = db.IntegerProperty() y", "- name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint:", "ancestor queries for strong # consistency in the High Replication Datastore. We initialize", "by applicable law or agreed to in writing, software # distributed under the", "# A query limit large enough to return all data. 
_BIG_ENOUGH = 100", "db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'],", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions", "y = db.IntegerProperty() z = db.IntegerProperty() # Having a root entity key allows", "this global # variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class", "query limit large enough to return all data. _BIG_ENOUGH = 100 _MODULE_SETUP =", "query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122',", "testSimpleQuery(self): # shouldn't require composite index query = db.Query(Item) query.filter('x =', 1) query.filter('y", "Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version", "class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def", "file except in compliance with the License. 
# You may obtain a copy", "for the functions that maintain a set of index definitions.\"\"\" def setUp(self): #", "work query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml()", "and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should still", "common from __mimic import composite_query from tests import test_util from google.appengine.api import datastore_errors", "desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return", "import datastore_errors from google.appengine.ext import db # A query limit large enough to", "expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual( expected, [e.key().name() for e", "use ancestor queries for strong # consistency in the High Replication Datastore. We", "License for the specific language governing permissions and # limitations under the License.", "\"\"\"Unit tests for composite_query.py.\"\"\" import unittest from __mimic import common from __mimic import", "to in writing, software # distributed under the License is distributed on an", "= db.Query(Item) query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143', '144'],", "range(5): name = '%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put()", "implied. 
# See the License for the specific language governing permissions and #", "'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(),", "\"License\"); # you may not use this file except in compliance with the", "'142', '143', '144'], query) def testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y", "# shouldn't require composite index query = db.Query(Item) query.filter('x =', 1) query.filter('y =',", "Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar',", "pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some data for", "in range(5): for z in range(5): name = '%d%d%d' % (x, y, z)", "z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that maintain a set of", "test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install()", "testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) if __name__ == '__main__': unittest.main()", "[e.key().name() for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require composite index", "License. 
\"\"\"Unit tests for composite_query.py.\"\"\" import unittest from __mimic import common from __mimic", "def testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140', '141',", "setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY)", "=', 1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =',", "initialize this global # variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None", "def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all", "index query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122',", "or implied. # See the License for the specific language governing permissions and", "query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)])", "maintain a set of index definitions.\"\"\" def setUp(self): # always start with a", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "for composite_query.py.\"\"\" import unittest from __mimic import common from __mimic import composite_query from", "fetching all self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try a slice", "query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130',", "a set of index definitions.\"\"\" def setUp(self): # always start with a known", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "in the High Replication Datastore. We initialize this global # variable in setUp", "x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that maintain a", "= True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint:", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') for _ in query.fetch(1):", "(x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for", "'%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit", "index definitions.\"\"\" def setUp(self): # always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes", "_BIG_ENOUGH = 100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A simple entity with 3", "# limitations under the License. 
\"\"\"Unit tests for composite_query.py.\"\"\" import unittest from __mimic", "# simple queries should still work query = db.Query(Item) query.filter('x =', 1) query.filter('y", "db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123', '124'], query)", "some data for x in range(5): for y in range(5): for z in", "set of index definitions.\"\"\" def setUp(self): # always start with a known state", "query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should still work query", "100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A simple entity with 3 integer properties.\"\"\"", "query.filter('x =', 1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x", "z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if", "'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']),", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "limitations under the License. \"\"\"Unit tests for composite_query.py.\"\"\" import unittest from __mimic import", "you may not use this file except in compliance with the License. #", "the High Replication Datastore. We initialize this global # variable in setUp after", "calling test_util.InitAppHostingApi(). 
_ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch()", "= db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123', '124'],", "'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes())", "query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z')", "language governing permissions and # limitations under the License. \"\"\"Unit tests for composite_query.py.\"\"\"", "Item properties: - name: x - name: y - name: z direction: desc\"\"\"", "pass expected = \"\"\"indexes: - kind: Item properties: - name: x - name:", "2) self.CheckQuery(['130', '131'], query) # remove patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError,", "=', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query) # remove patch and try", "query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query", "use this file except in compliance with the License. 
# You may obtain", "'141', '142', '143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query =", "Having a root entity key allows us to use ancestor queries for strong", "try a slice self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self):", "queries should still work query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3)", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", ">', 10) self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =', 1)", "for e in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3], [e.key().name() for e", "shouldn't require composite index query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2)", "=', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1) query.filter('y =',", "def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual( expected, [e.key().name()", "=', 1) query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query) # remove", "2.0 (the \"License\"); # you may not use this file except in compliance", "to return all data. 
_BIG_ENOUGH = 100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A", "query) def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >', 3)", "= False class Item(db.Model): \"\"\"A simple entity with 3 integer properties.\"\"\" x =", "IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that maintain a set of index definitions.\"\"\"", "query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123', '124'], query) def testDescendingOrder(self): query =", "= '%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase):", "common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self):", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140',", "# remove patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple", "in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2,", "y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that maintain a set", "_MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY", "1) query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1)", "query.filter('y =', 3) def testIndexYamlRecording(self): composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1) query.filter('y", "def testSimpleQuery(self): # shouldn't 
require composite index query = db.Query(Item) query.filter('x =', 1)", "self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml()", "start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self):", "# # Unless required by applicable law or agreed to in writing, software", "db.Query(Item) query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143', '144'], query)", "express or implied. # See the License for the specific language governing permissions", "composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\"", "for strong # consistency in the High Replication Datastore. We initialize this global", "__mimic import common from __mimic import composite_query from tests import test_util from google.appengine.api", "db # A query limit large enough to return all data. _BIG_ENOUGH =", "in query.fetch(1): pass expected = \"\"\"indexes: - kind: Item properties: - name: x", "unittest from __mimic import common from __mimic import composite_query from tests import test_util", "testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([], query) def", "definitions.\"\"\" def setUp(self): # always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes =", "either express or implied. 
# See the License for the specific language governing", "composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected =", "'123', '122', '121', '120'], query) def testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1)", "kind: Item properties: - name: x - name: y - name: z direction:", "import db # A query limit large enough to return all data. _BIG_ENOUGH", "# pylint: disable-msg=C6409 # add some data for x in range(5): for y", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "limit large enough to return all data. _BIG_ENOUGH = 100 _MODULE_SETUP = False", "self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar')", "query.ancestor(_ROOT_ITEM_KEY) # first check fetching all self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)])", "state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def", "'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar", "enough to return all data. 
_BIG_ENOUGH = 100 _MODULE_SETUP = False class Item(db.Model):", "testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self):", "Datastore. We initialize this global # variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY", "require composite index query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120',", "__mimic import composite_query from tests import test_util from google.appengine.api import datastore_errors from google.appengine.ext", "after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch =", "_ in query.fetch(1): pass expected = \"\"\"indexes: - kind: Item properties: - name:", "the License. # You may obtain a copy of the License at #", "'124'], query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2)", "consistency in the High Replication Datastore. We initialize this global # variable in", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "query.order('-z') for _ in query.fetch(1): pass expected = \"\"\"indexes: - kind: Item properties:", "CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "% (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests", "setUp after calling test_util.InitAppHostingApi(). 
_ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch", "global # variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase):", "composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']),", "allows us to use ancestor queries for strong # consistency in the High", "self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should still work query = db.Query(Item) query.filter('x", "if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY", "from __mimic import composite_query from tests import test_util from google.appengine.api import datastore_errors from", "A query limit large enough to return all data. 
_BIG_ENOUGH = 100 _MODULE_SETUP", "e in query.fetch(limit=2, offset=1)]) def testSimpleQuery(self): # shouldn't require composite index query =", "tests for the functions that maintain a set of index definitions.\"\"\" def setUp(self):", "def testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124',", "= composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first", "google.appengine.api import datastore_errors from google.appengine.ext import db # A query limit large enough", "first check fetching all self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try", "query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143', '144'], query) def", "self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) #", "self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query", "with the License. # You may obtain a copy of the License at", "and # limitations under the License. 
\"\"\"Unit tests for composite_query.py.\"\"\" import unittest from", "self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries should still work query = db.Query(Item)", "'121', '122', '123', '124'], query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1)", "for _ in query.fetch(1): pass expected = \"\"\"indexes: - kind: Item properties: -", "= db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143',", "db.Query(Item) query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([], query) def testKeysOnly(self): query =", "=', 1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123', '124'], query) def testDescendingOrder(self):", "with 3 integer properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty()", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "data for x in range(5): for y in range(5): for z in range(5):", "composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP =", "name = '%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class", "'122', '121', '120'], query) def testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y", "permissions and # limitations under the License. 
\"\"\"Unit tests for composite_query.py.\"\"\" import unittest", "composite_query.ClearIndexYaml() query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') for _", "db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) query.filter('z <', 2) self.CheckQuery(['130', '131'], query)", "setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi()", "1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123', '124'], query) def testDescendingOrder(self): query", "=', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query) def", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "= None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self):", "functions that maintain a set of index definitions.\"\"\" def setUp(self): # always start", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "entity key allows us to use ancestor queries for strong # consistency in", "[e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3], [e.key().name() for", "from tests import test_util from google.appengine.api import datastore_errors from google.appengine.ext import db #", "'131'], query) # remove patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH)", "query.filter('z <', 2) self.CheckQuery(['130', '131'], query) # remove patch and try query again", "def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() 
self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self):", "e in query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3], [e.key().name() for e in", "strong # consistency in the High Replication Datastore. We initialize this global #", "data. _BIG_ENOUGH = 100 _MODULE_SETUP = False class Item(db.Model): \"\"\"A simple entity with", "self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >',", "always start with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def", "2) query.order('-z') self.CheckQuery(['124', '123', '122', '121', '120'], query) def testNonEqFilter(self): query = db.Query(Item)", "=', 2) query.order('-z') for _ in query.fetch(1): pass expected = \"\"\"indexes: - kind:", "query) def testEmptyResult(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 10) self.CheckQuery([],", "Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "in compliance with the License. # You may obtain a copy of the", "def setUp(self): setUp() self._patch = composite_query.CompositeQueryPatch() self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
#", "a root entity key allows us to use ancestor queries for strong #", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "query.fetch(_BIG_ENOUGH)]) # try a slice self.assertListEqual( expected[1:3], [e.key().name() for e in query.fetch(limit=2, offset=1)])", "query.fetch, _BIG_ENOUGH) # simple queries should still work query = db.Query(Item) query.filter('x =',", "db.IntegerProperty() # Having a root entity key allows us to use ancestor queries", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "name: y - name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP", "self.CheckQuery(['140', '141', '142', '143', '144'], query) def testEmptyResult(self): query = db.Query(Item) query.filter('x =',", ">', 3) self.CheckQuery(['140', '141', '142', '143', '144'], query) def testEmptyResult(self): query = db.Query(Item)", "testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) query.filter('z <', 2)", "the specific language governing permissions and # limitations under the License. 
\"\"\"Unit tests", "testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') self.CheckQuery(['124', '123',", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "with a known state common.ClearPersistent(common.PERSIST_INDEX_NAME) indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo',", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143',", "- name: x - name: y - name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml())", "composite_query from tests import test_util from google.appengine.api import datastore_errors from google.appengine.ext import db", "indexes = set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz')", "# variable in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def", "y - name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP #", "remove patch and try query again self._patch.Remove() self.assertRaises(datastore_errors.NeedIndexError, query.fetch, _BIG_ENOUGH) # simple queries", "in range(5): name = '%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y,", "3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self):", "specific language governing permissions and # limitations under the License. 
\"\"\"Unit tests for", "query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) query.filter('z", "def testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) query.filter('z <',", "for y in range(5): for z in range(5): name = '%d%d%d' % (x,", "# Copyright 2012 Google Inc. All Rights Reserved. # # Licensed under the", "query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123',", "testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected", "self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) if __name__", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. 
# You may obtain a copy of", "check fetching all self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) # try a", "\"\"\"A simple entity with 3 integer properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty()", "10) self.CheckQuery([], query) def testKeysOnly(self): query = db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y", "# Having a root entity key allows us to use ancestor queries for", "disable-msg=C6409 # add some data for x in range(5): for y in range(5):", "testClearIndexYaml(self): composite_query.ClearIndexYaml() self.assertSetEqual(set(), composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml())", "# first check fetching all self.assertListEqual( expected, [e.key().name() for e in query.fetch(_BIG_ENOUGH)]) #", "for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y", "from google.appengine.api import datastore_errors from google.appengine.ext import db # A query limit large", "=', 1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143', '144'], query) def testEmptyResult(self):", "z in range(5): name = '%d%d%d' % (x, y, z) Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x,", "1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143', '144'], [k.name() for k in", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "pylint: disable-msg=C6409 # add some data for x in range(5): for y in", "queries for strong # consistency in the High Replication Datastore. 
We initialize this", "import unittest from __mimic import common from __mimic import composite_query from tests import", "testNonEqFilter(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142',", "True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409", "query.filter('x =', 1) query.filter('y =', 2) query.order('-z') for _ in query.fetch(1): pass expected", "key allows us to use ancestor queries for strong # consistency in the", "self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global _MODULE_SETUP # pylint: disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP", "for the specific language governing permissions and # limitations under the License. \"\"\"Unit", "'123', '124'], query) def testDescendingOrder(self): query = db.Query(Item) query.filter('x =', 1) query.filter('y =',", "db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty() # Having a root entity key", "parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that maintain", "import composite_query from tests import test_util from google.appengine.api import datastore_errors from google.appengine.ext import", "root entity key allows us to use ancestor queries for strong # consistency", "'142', '143', '144'], [k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item)", "properties: - name: x - name: y - name: z direction: desc\"\"\" self.assertEquals(expected,", "self._patch.Install() def tearDown(self): self._patch.Remove() def CheckQuery(self, expected, query): query.ancestor(_ROOT_ITEM_KEY) # first check fetching", "disable-msg=W0603 if _MODULE_SETUP: return _MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603", "= db.IntegerProperty() # Having a root 
entity key allows us to use ancestor", "in setUp after calling test_util.InitAppHostingApi(). _ROOT_ITEM_KEY = None class CompositeQueryTest(unittest.TestCase): def setUp(self): setUp()", "composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def testDuplicatesIgnored(self): composite_query._RecordIndex('bar') self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testClearIndexYaml(self):", "_MODULE_SETUP = False class Item(db.Model): \"\"\"A simple entity with 3 integer properties.\"\"\" x", "query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 2) query.order('-z') for _ in", "def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo', 'bar', 'baz']), composite_query._ReadIndexes()) def", "= set(['foo', 'bar']) composite_query._WriteIndexes(indexes) def testReadIndexes(self): self.assertSetEqual(set(['foo', 'bar']), composite_query._ReadIndexes()) def testRecordIndex(self): composite_query._RecordIndex('baz') self.assertSetEqual(set(['foo',", "test_util from google.appengine.api import datastore_errors from google.appengine.ext import db # A query limit", "- name: y - name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp(): global", "offset=1)]) def testSimpleQuery(self): # shouldn't require composite index query = db.Query(Item) query.filter('x =',", "Item(key_name=name, parent=_ROOT_ITEM_KEY, x=x, y=y, z=z).put() class IndexYamlTest(unittest.TestCase): \"\"\"Unit tests for the functions that", "1) query.filter('y >', 3) self.CheckQuery(['140', '141', '142', '143', '144'], query) def testEmptyResult(self): query", "db.Query(Item, keys_only=True) query.filter('x =', 1) query.filter('y >', 3) self.assertListEqual(['140', '141', '142', '143', '144'],", "us to use ancestor queries for strong # 
consistency in the High Replication", "x - name: y - name: z direction: desc\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) def setUp():", "properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty() z = db.IntegerProperty() # Having a", "_MODULE_SETUP = True test_util.InitAppHostingApi() global _ROOT_ITEM_KEY # pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') #", "query.filter('x =', 1) query.filter('y =', 2) self.CheckQuery(['120', '121', '122', '123', '124'], query) def", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "[k.name() for k in query.fetch(_BIG_ENOUGH)]) def testPatchRemoval(self): query = db.Query(Item) query.filter('x =', 1)", "# pylint: disable-msg=W0603 _ROOT_ITEM_KEY = Item(key_name='root_entity') # pylint: disable-msg=C6409 # add some data", "still work query = db.Query(Item) query.filter('x =', 1) query.filter('y =', 3) def testIndexYamlRecording(self):", "<', 2) self.CheckQuery(['130', '131'], query) # remove patch and try query again self._patch.Remove()", "composite_query._ReadIndexes()) def testGetIndexYaml(self): expected = \"\"\"indexes: bar foo\"\"\" self.assertEquals(expected, composite_query.GetIndexYaml()) if __name__ ==", "=', 2) self.CheckQuery(['120', '121', '122', '123', '124'], query) def testDescendingOrder(self): query = db.Query(Item)", "entity with 3 integer properties.\"\"\" x = db.IntegerProperty() y = db.IntegerProperty() z =", "google.appengine.ext import db # A query limit large enough to return all data.", "= \"\"\"indexes: - kind: Item properties: - name: x - name: y -" ]
[ "import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as long as condition holds", "import Observable from rx.internal import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as", "be repeated. Returns an observable {Observable} sequence which is repeated as long as", "which is repeated as long as the condition holds. \"\"\" return Observable.concat([self, Observable.while_do(condition,", "repeated. Returns an observable {Observable} sequence which is repeated as long as the", "will be repeated. Returns an observable {Observable} sequence which is repeated as long", "@extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as long as condition holds emulating a", "source as long as condition holds emulating a do while loop. Keyword arguments:", "sequence which is repeated as long as the condition holds. \"\"\" return Observable.concat([self,", "observable {Observable} sequence which is repeated as long as the condition holds. \"\"\"", "def do_while(self, condition): \"\"\"Repeats source as long as condition holds emulating a do", "do while loop. Keyword arguments: condition -- {Function} The condition which determines if", "from rx.internal import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as long as", "an observable {Observable} sequence which is repeated as long as the condition holds.", "-- {Function} The condition which determines if the source will be repeated. Returns", "as long as condition holds emulating a do while loop. Keyword arguments: condition", "arguments: condition -- {Function} The condition which determines if the source will be", "emulating a do while loop. Keyword arguments: condition -- {Function} The condition which", "source will be repeated. 
Returns an observable {Observable} sequence which is repeated as", "\"\"\"Repeats source as long as condition holds emulating a do while loop. Keyword", "condition -- {Function} The condition which determines if the source will be repeated.", "long as condition holds emulating a do while loop. Keyword arguments: condition --", "The condition which determines if the source will be repeated. Returns an observable", "{Function} The condition which determines if the source will be repeated. Returns an", "{Observable} sequence which is repeated as long as the condition holds. \"\"\" return", "while loop. Keyword arguments: condition -- {Function} The condition which determines if the", "Observable from rx.internal import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as long", "rx.core import Observable from rx.internal import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source", "Keyword arguments: condition -- {Function} The condition which determines if the source will", "the source will be repeated. Returns an observable {Observable} sequence which is repeated", "is repeated as long as the condition holds. \"\"\" return Observable.concat([self, Observable.while_do(condition, self)])", "from rx.core import Observable from rx.internal import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats", "extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as long as condition holds emulating", "a do while loop. Keyword arguments: condition -- {Function} The condition which determines", "loop. Keyword arguments: condition -- {Function} The condition which determines if the source", "holds emulating a do while loop. 
Keyword arguments: condition -- {Function} The condition", "rx.internal import extensionmethod @extensionmethod(Observable) def do_while(self, condition): \"\"\"Repeats source as long as condition", "condition): \"\"\"Repeats source as long as condition holds emulating a do while loop.", "condition holds emulating a do while loop. Keyword arguments: condition -- {Function} The", "which determines if the source will be repeated. Returns an observable {Observable} sequence", "determines if the source will be repeated. Returns an observable {Observable} sequence which", "condition which determines if the source will be repeated. Returns an observable {Observable}", "Returns an observable {Observable} sequence which is repeated as long as the condition", "as condition holds emulating a do while loop. Keyword arguments: condition -- {Function}", "do_while(self, condition): \"\"\"Repeats source as long as condition holds emulating a do while", "if the source will be repeated. Returns an observable {Observable} sequence which is" ]
[ "concept of a semi-persistant setting on the UAV. The setting may be read/updated", "import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING", "providing noisy data (usually for testing) \"\"\" def value(self): \"\"\" :returns: the next", "def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt = 0.0", "= time.time() self.dt += (self.freq * (t - self.t)) self.t = t val", "Generates a noisy random walk \"\"\" def __init__(self, start, end, delta, value_type=float): self.v", "create groundstation and other monitoring software for interacting with the UAV. The library", "= amplitude self.type = value_type #add 1 to sine to keep +ve if", "= self.amp + n def value(self): t = time.time() self.dt += (self.freq *", "- self.t)) self.t = t val = (self.offset * math.sin(self.dt)) * self.amp noise", "value(self): \"\"\" :returns: the next value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates", "+ (self.delta * random.randrange(0.0,1.0, int=float)) if self.start > self.end: if v > self.end", "class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float,", "characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\"", "self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk \"\"\" def", "n = (noise_pct/100.0) * self.amp self.n1 = self.amp - n self.n2 = self.amp", "def value(self): v = self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if self.start >", "noise is x percent of the amplitude n = (noise_pct/100.0) * self.amp self.n1", "self.delta = delta self.type = value_type def value(self): return self.type(self.v + (self.delta *", "+ n def value(self): t = time.time() self.dt += (self.freq * (t -", "0.0 self.freq = freq self.amp 
= amplitude self.type = value_type #add 1 to", "self.t = time.time() self.dt = 0.0 self.freq = freq self.amp = amplitude self.type", "} class _Noisy: \"\"\" An interface for objects providing noisy data (usually for", "return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk \"\"\"", "math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val) class", "\"uint16\" : 2, \"int16\" : 2, \"uint32\" : 4, \"int32\" : 4, \"float\"", "to its length in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP = { \"char\"", "os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type to its length in bytes (e.g", "__init__(self, value, delta, value_type=float): self.v = value self.delta = delta self.type = value_type", "0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug types to", "self.amp self.n1 = self.amp - n self.n2 = self.amp + n def value(self):", "Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v = value self.delta = delta self.type", "\"\"\" Generates a noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10):", "(self.delta * random.randrange(0.0,1.0, int=float)) if self.start > self.end: if v > self.end and", "float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the", ":returns: the next value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy", "v = self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if self.start > self.end: if", "correct format string TYPE_TO_PRINT_MAP = { float : \"%f\", str : \"%s\", chr", "= value_type #add 1 to sine to keep +ve if positive: self.offset =", "time.time() self.dt = 0.0 self.freq = freq self.amp = amplitude self.type = value_type", "be read/updated from the 
groundstation and stored on the UAV \"\"\" import os", "#add 1 to sine to keep +ve if positive: self.offset = 1.0 else:", "on the UAV \"\"\" import os import random import time import math HOME_LAT", "= v else: if v < self.end and v > self.start: self.v =", "types to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\",", "UAVs running the wasp software system. This library can be used to create", "= 1.0 else: self.offset = 0.0 #the noise is x percent of the", "self.t)) self.t = t val = (self.offset * math.sin(self.dt)) * self.amp noise =", "self.end: if v > self.end and v < self.start: self.v = v else:", "start self.delta = delta self.type = value_type def value(self): v = self.v +", "- n self.n2 = self.amp + n def value(self): t = time.time() self.dt", "over the chosen communication channel from the UAV to the groundstation * *settings.xml*", "def __init__(self, start, end, delta, value_type=float): self.v = start self.end = end self.start", "1) TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\" : 1, \"int8\" : 1,", "of the amplitude n = (noise_pct/100.0) * self.amp self.n1 = self.amp - n", ": 1, \"int8\" : 1, \"uint16\" : 2, \"int16\" : 2, \"uint32\" :", "self.end and v < self.start: self.v = v else: if v < self.end", "self.end and v > self.start: self.v = v return self.type(self.v) class Noisy(_Noisy): def", "= value self.delta = delta self.type = value_type def value(self): return self.type(self.v +", "\"%s\", chr : \"%c\", int : \"%d\" } ACID_ALL = 0xFF ACID_TEST =", "walk \"\"\" def __init__(self, start, end, delta, value_type=float): self.v = start self.end =", "a concept of a semi-persistant setting on the UAV. 
The setting may be", "the chosen communication channel from the UAV to the groundstation * *settings.xml* -", "sent over the chosen communication channel from the UAV to the groundstation *", "value_type #add 1 to sine to keep +ve if positive: self.offset = 1.0", "the C type to correct format string TYPE_TO_PRINT_MAP = { float : \"%f\",", "C type to its length in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP =", "(self.freq * (t - self.t)) self.t = t val = (self.offset * math.sin(self.dt))", "wasp software system. This library can be used to create groundstation and other", "raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\" def __init__(self, freq=1.0,", "value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt = 0.0 self.freq = freq self.amp", "length in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP = { \"char\" : 1,", "def __init__(self, value, delta, value_type=float): self.v = value self.delta = delta self.type =", "start self.end = end self.start = start self.delta = delta self.type = value_type", "import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\")", "for interacting with UAVs running the wasp software system. 
This library can be", "val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk \"\"\" def __init__(self, start,", "v > self.start: self.v = v return self.type(self.v) class Noisy(_Noisy): def __init__(self, value,", "= os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type to its length in bytes", "messages sent over the chosen communication channel from the UAV to the groundstation", "= start self.end = end self.start = start self.delta = delta self.type =", "self.start > self.end: if v > self.end and v < self.start: self.v =", "\"char\" : 1, \"uint8\" : 1, \"int8\" : 1, \"uint16\" : 2, \"int16\"", ": \"%#f\" } class _Noisy: \"\"\" An interface for objects providing noisy data", "NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True,", "value_type=float): self.v = value self.delta = delta self.type = value_type def value(self): return", "\"float\" : 4, } #: dictionary mapping the C type to correct format", "the UAV \"\"\" import os import random import time import math HOME_LAT =", "int : \"%d\" } ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD", "\"\"\" Generates a noisy random walk \"\"\" def __init__(self, start, end, delta, value_type=float):", "and stored on the UAV \"\"\" import os import random import time import", "self.amp - n self.n2 = self.amp + n def value(self): t = time.time()", "char -> 1) TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\" : 1, \"int8\"", "setting on the UAV. The setting may be read/updated from the groundstation and", "and other monitoring software for interacting with the UAV. 
The library is coupled", "may be read/updated from the groundstation and stored on the UAV \"\"\" import", "= float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping", "val = (self.offset * math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return", "\"%f\", str : \"%s\", chr : \"%c\", int : \"%d\" } ACID_ALL =", "\"\"\" import os import random import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451))", "interacting with the UAV. The library is coupled with the onboard sofware througn", "self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\"", "#: dictionary mapping the C type to correct format string TYPE_TO_PRINT_MAP = {", "mapping the C type to its length in bytes (e.g char -> 1)", "= (noise_pct/100.0) * self.amp self.n1 = self.amp - n self.n2 = self.amp +", "value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\" def", "self.end = end self.start = start self.delta = delta self.type = value_type def", "its length in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP = { \"char\" :", "amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt = 0.0 self.freq = freq", "= 0.0 self.freq = freq self.amp = amplitude self.type = value_type #add 1", "\"\"\" libwasp is a library for interacting with UAVs running the wasp software", "TYPE_TO_PRINT_MAP = { float : \"%f\", str : \"%s\", chr : \"%c\", int", "self.type(self.v) class Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v = value self.delta =", "{ \"char\" : 1, \"uint8\" : 1, \"int8\" : 1, \"uint16\" : 2,", "0xFD #: dictionary mapping debug types to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\"", "interface for objects providing noisy data 
(usually for testing) \"\"\" def value(self): \"\"\"", "Generates a noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t", "value, delta, value_type=float): self.v = value self.delta = delta self.type = value_type def", "interacting with UAVs running the wasp software system. This library can be used", "software system. This library can be used to create groundstation and other monitoring", "self.v = start self.end = end self.start = start self.delta = delta self.type", "a noisy random walk \"\"\" def __init__(self, start, end, delta, value_type=float): self.v =", "self.v = v else: if v < self.end and v > self.start: self.v", "1, \"uint8\" : 1, \"int8\" : 1, \"uint16\" : 2, \"int16\" : 2,", "*settings.xml* - a concept of a semi-persistant setting on the UAV. The setting", "= (self.offset * math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise", "ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug", "if positive: self.offset = 1.0 else: self.offset = 0.0 #the noise is x", "- the defintion of messages sent over the chosen communication channel from the", "2, \"uint32\" : 4, \"int32\" : 4, \"float\" : 4, } #: dictionary", ": 4, \"float\" : 4, } #: dictionary mapping the C type to", "coupled with the onboard sofware througn * *messages.xml* - the defintion of messages", "_Noisy: \"\"\" An interface for objects providing noisy data (usually for testing) \"\"\"", "througn * *messages.xml* - the defintion of messages sent over the chosen communication", "= { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class", "channel from the UAV to the groundstation * *settings.xml* - a concept of", "value self.delta = delta self.type = value_type def value(self): return self.type(self.v + (self.delta", "the C type to its length in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP", "\"DEBUG_UINT8\" : 
\"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class _Noisy: \"\"\"", "amplitude n = (noise_pct/100.0) * self.amp self.n1 = self.amp - n self.n2 =", "4, } #: dictionary mapping the C type to correct format string TYPE_TO_PRINT_MAP", "HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type", ": 4, \"int32\" : 4, \"float\" : 4, } #: dictionary mapping the", "= random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates a", "to correct format string TYPE_TO_PRINT_MAP = { float : \"%f\", str : \"%s\",", "import random import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\",", "delta self.type = value_type def value(self): return self.type(self.v + (self.delta * random.randrange(0.0,1.0, int=float)))", "else: self.offset = 0.0 #the noise is x percent of the amplitude n", "self.dt += (self.freq * (t - self.t)) self.t = t val = (self.offset", "self.offset = 1.0 else: self.offset = 0.0 #the noise is x percent of", "for interacting with the UAV. The library is coupled with the onboard sofware", "} ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping", "< self.start: self.v = v else: if v < self.end and v >", "UAV. The setting may be read/updated from the groundstation and stored on the", "< self.end and v > self.start: self.v = v return self.type(self.v) class Noisy(_Noisy):", "library can be used to create groundstation and other monitoring software for interacting", "def value(self): \"\"\" :returns: the next value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\"", "noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time()", "to create groundstation and other monitoring software for interacting with the UAV. 
The", "\"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt =", "for testing) \"\"\" def value(self): \"\"\" :returns: the next value \"\"\" raise NotImplementedError", "format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" :", "int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk", "other monitoring software for interacting with the UAV. The library is coupled with", "= value_type def value(self): v = self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if", "= end self.start = start self.delta = delta self.type = value_type def value(self):", "dictionary mapping debug types to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\",", "= self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if self.start > self.end: if v", "= 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug types", "delta self.type = value_type def value(self): v = self.v + (self.delta * random.randrange(0.0,1.0,", "v else: if v < self.end and v > self.start: self.v = v", "172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type to its length", "string TYPE_TO_PRINT_MAP = { float : \"%f\", str : \"%s\", chr : \"%c\",", "= 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug types to format characters", "#: dictionary mapping debug types to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" :", "self.freq = freq self.amp = amplitude self.type = value_type #add 1 to sine", "= delta self.type = value_type def value(self): v = self.v + (self.delta *", "if v > self.end and v < self.start: self.v = v else: if", "class Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v = value self.delta = delta", "\"DEBUG_FLOAT\" : \"%#f\" } class _Noisy: \"\"\" An interface for objects providing noisy", "#the noise is x 
percent of the amplitude n = (noise_pct/100.0) * self.amp", "def value(self): t = time.time() self.dt += (self.freq * (t - self.t)) self.t", "self.start: self.v = v else: if v < self.end and v > self.start:", "delta, value_type=float): self.v = value self.delta = delta self.type = value_type def value(self):", "{ float : \"%f\", str : \"%s\", chr : \"%c\", int : \"%d\"", "DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" }", "bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\" :", "sofware througn * *messages.xml* - the defintion of messages sent over the chosen", "1.0 else: self.offset = 0.0 #the noise is x percent of the amplitude", "(usually for testing) \"\"\" def value(self): \"\"\" :returns: the next value \"\"\" raise", "the UAV. The setting may be read/updated from the groundstation and stored on", "self.dt = 0.0 self.freq = freq self.amp = amplitude self.type = value_type #add", "onboard sofware througn * *messages.xml* - the defintion of messages sent over the", "of a semi-persistant setting on the UAV. 
The setting may be read/updated from", "\"\"\" An interface for objects providing noisy data (usually for testing) \"\"\" def", "* *messages.xml* - the defintion of messages sent over the chosen communication channel", "= start self.delta = delta self.type = value_type def value(self): v = self.v", "self.t = t val = (self.offset * math.sin(self.dt)) * self.amp noise = random.randrange(self.n1,", "An interface for objects providing noisy data (usually for testing) \"\"\" def value(self):", "mapping the C type to correct format string TYPE_TO_PRINT_MAP = { float :", "data (usually for testing) \"\"\" def value(self): \"\"\" :returns: the next value \"\"\"", "libwasp is a library for interacting with UAVs running the wasp software system.", ": 1, \"uint16\" : 2, \"int16\" : 2, \"uint32\" : 4, \"int32\" :", "This library can be used to create groundstation and other monitoring software for", "next value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\"", ": \"%f\", str : \"%s\", chr : \"%c\", int : \"%d\" } ACID_ALL", "self.start = start self.delta = delta self.type = value_type def value(self): v =", "amplitude self.type = value_type #add 1 to sine to keep +ve if positive:", "-43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C", "ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug types to format characters DEBUG_MESSAGES =", "communication channel from the UAV to the groundstation * *settings.xml* - a concept", "delta, value_type=float): self.v = start self.end = end self.start = start self.delta =", "start, end, delta, value_type=float): self.v = start self.end = end self.start = start", "stored on the UAV \"\"\" import os import random import time import math", "if self.start > self.end: if v > self.end and v < self.start: self.v", "setting may be read/updated from the groundstation and stored on the UAV \"\"\"", 
"noise_pct=10): self.t = time.time() self.dt = 0.0 self.freq = freq self.amp = amplitude", "= 0.0 #the noise is x percent of the amplitude n = (noise_pct/100.0)", "used to create groundstation and other monitoring software for interacting with the UAV.", "math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #:", "= { float : \"%f\", str : \"%s\", chr : \"%c\", int :", "NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk \"\"\" def __init__(self, start, end, delta,", "and v < self.start: self.v = v else: if v < self.end and", "is a library for interacting with UAVs running the wasp software system. This", "* self.amp self.n1 = self.amp - n self.n2 = self.amp + n def", "* math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val)", "the onboard sofware througn * *messages.xml* - the defintion of messages sent over", ": 2, \"uint32\" : 4, \"int32\" : 4, \"float\" : 4, } #:", "self.delta = delta self.type = value_type def value(self): v = self.v + (self.delta", "t val = (self.offset * math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2, int=self.type)", "the amplitude n = (noise_pct/100.0) * self.amp self.n1 = self.amp - n self.n2", "> self.end and v < self.start: self.v = v else: if v <", "the groundstation and stored on the UAV \"\"\" import os import random import", "self.n2, int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy random", "type to correct format string TYPE_TO_PRINT_MAP = { float : \"%f\", str :", "4, \"float\" : 4, } #: dictionary mapping the C type to correct", "\"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class _Noisy: \"\"\" An interface", "chosen communication channel from the UAV to the groundstation * *settings.xml* - a", "end, delta, value_type=float): self.v = start self.end = 
end self.start = start self.delta", "TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\" : 1, \"int8\" : 1, \"uint16\"", "__init__(self, start, end, delta, value_type=float): self.v = start self.end = end self.start =", "UAV \"\"\" import os import random import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\",", "if v < self.end and v > self.start: self.v = v return self.type(self.v)", "value_type=float): self.v = start self.end = end self.start = start self.delta = delta", "= time.time() self.dt = 0.0 self.freq = freq self.amp = amplitude self.type =", "= self.amp - n self.n2 = self.amp + n def value(self): t =", "import os import random import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON", "IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type to its length in", "t = time.time() self.dt += (self.freq * (t - self.t)) self.t = t", "keep +ve if positive: self.offset = 1.0 else: self.offset = 0.0 #the noise", "= v return self.type(self.v) class Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v =", "random.randrange(0.0,1.0, int=float)) if self.start > self.end: if v > self.end and v <", "= 0xFD #: dictionary mapping debug types to format characters DEBUG_MESSAGES = {", "0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug types to format characters DEBUG_MESSAGES", "\"\"\" :returns: the next value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a", "mapping debug types to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\"", "is coupled with the onboard sofware througn * *messages.xml* - the defintion of", "+ve if positive: self.offset = 1.0 else: self.offset = 0.0 #the noise is", "time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING =", "to keep +ve if positive: self.offset = 1.0 else: self.offset 
= 0.0 #the", "* self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy):", "library is coupled with the onboard sofware througn * *messages.xml* - the defintion", "to the groundstation * *settings.xml* - a concept of a semi-persistant setting on", "freq self.amp = amplitude self.type = value_type #add 1 to sine to keep", "random import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377))", "else: if v < self.end and v > self.start: self.v = v return", "(e.g char -> 1) TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\" : 1,", "and v > self.start: self.v = v return self.type(self.v) class Noisy(_Noisy): def __init__(self,", "type to its length in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP = {", "(noise_pct/100.0) * self.amp self.n1 = self.amp - n self.n2 = self.amp + n", "{ \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class _Noisy:", "for objects providing noisy data (usually for testing) \"\"\" def value(self): \"\"\" :returns:", "= { \"char\" : 1, \"uint8\" : 1, \"int8\" : 1, \"uint16\" :", ": \"%d\" } ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #:", "read/updated from the groundstation and stored on the UAV \"\"\" import os import", "dictionary mapping the C type to correct format string TYPE_TO_PRINT_MAP = { float", "} #: dictionary mapping the C type to correct format string TYPE_TO_PRINT_MAP =", "\"int8\" : 1, \"uint16\" : 2, \"int16\" : 2, \"uint32\" : 4, \"int32\"", "The library is coupled with the onboard sofware througn * *messages.xml* - the", "running the wasp software system. 
This library can be used to create groundstation", ": \"%s\", chr : \"%c\", int : \"%d\" } ACID_ALL = 0xFF ACID_TEST", "float : \"%f\", str : \"%s\", chr : \"%c\", int : \"%d\" }", "str : \"%s\", chr : \"%c\", int : \"%d\" } ACID_ALL = 0xFF", "value_type def value(self): v = self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if self.start", "HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON = float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary", "n self.n2 = self.amp + n def value(self): t = time.time() self.dt +=", "self.type = value_type def value(self): v = self.v + (self.delta * random.randrange(0.0,1.0, int=float))", "-> 1) TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\" : 1, \"int8\" :", "positive=True, noise_pct=10): self.t = time.time() self.dt = 0.0 self.freq = freq self.amp =", "\"\"\" def __init__(self, start, end, delta, value_type=float): self.v = start self.end = end", "\"uint32\" : 4, \"int32\" : 4, \"float\" : 4, } #: dictionary mapping", "v < self.end and v > self.start: self.v = v return self.type(self.v) class", "groundstation and stored on the UAV \"\"\" import os import random import time", "0.0 #the noise is x percent of the amplitude n = (noise_pct/100.0) *", "class NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk \"\"\" def __init__(self, start, end,", "freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt = 0.0 self.freq =", "noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates", "(t - self.t)) self.t = t val = (self.offset * math.sin(self.dt)) * self.amp", "value(self): v = self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if self.start > self.end:", "* random.randrange(0.0,1.0, int=float)) if self.start > self.end: if v > self.end and v", "self.start: self.v = v return self.type(self.v) class 
Noisy(_Noisy): def __init__(self, value, delta, value_type=float):", "\"%d\" } ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary", "UAV. The library is coupled with the onboard sofware througn * *messages.xml* -", "v < self.start: self.v = v else: if v < self.end and v", "= freq self.amp = amplitude self.type = value_type #add 1 to sine to", "in bytes (e.g char -> 1) TYPE_TO_LENGTH_MAP = { \"char\" : 1, \"uint8\"", "+= (self.freq * (t - self.t)) self.t = t val = (self.offset *", "on the UAV. The setting may be read/updated from the groundstation and stored", "(self.offset * math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2, int=self.type) return self.type(noise +", "value(self): t = time.time() self.dt += (self.freq * (t - self.t)) self.t =", "to sine to keep +ve if positive: self.offset = 1.0 else: self.offset =", "the UAV to the groundstation * *settings.xml* - a concept of a semi-persistant", "objects providing noisy data (usually for testing) \"\"\" def value(self): \"\"\" :returns: the", ": \"%c\", int : \"%d\" } ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION", "with the UAV. 
The library is coupled with the onboard sofware througn *", "1, \"int8\" : 1, \"uint16\" : 2, \"int16\" : 2, \"uint32\" : 4,", "1 to sine to keep +ve if positive: self.offset = 1.0 else: self.offset", "percent of the amplitude n = (noise_pct/100.0) * self.amp self.n1 = self.amp -", "sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt", "dictionary mapping the C type to its length in bytes (e.g char ->", "UAV to the groundstation * *settings.xml* - a concept of a semi-persistant setting", "be used to create groundstation and other monitoring software for interacting with the", "= t val = (self.offset * math.sin(self.dt)) * self.amp noise = random.randrange(self.n1, self.n2,", "4, \"int32\" : 4, \"float\" : 4, } #: dictionary mapping the C", "noisy data (usually for testing) \"\"\" def value(self): \"\"\" :returns: the next value", "self.v = value self.delta = delta self.type = value_type def value(self): return self.type(self.v", "#: dictionary mapping the C type to its length in bytes (e.g char", "the groundstation * *settings.xml* - a concept of a semi-persistant setting on the", "\"%c\", int : \"%d\" } ACID_ALL = 0xFF ACID_TEST = 0xFE ACID_GROUNDSTATION =", "v > self.end and v < self.start: self.v = v else: if v", "self.v + (self.delta * random.randrange(0.0,1.0, int=float)) if self.start > self.end: if v >", "\"%#f\" } class _Noisy: \"\"\" An interface for objects providing noisy data (usually", "random.randrange(self.n1, self.n2, int=self.type) return self.type(noise + val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy", "2, \"int16\" : 2, \"uint32\" : 4, \"int32\" : 4, \"float\" : 4,", "*messages.xml* - the defintion of messages sent over the chosen communication channel from", "time.time() self.dt += (self.freq * (t - self.t)) self.t = t val =", "from the groundstation and stored on the UAV \"\"\" import os import random", "\"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class 
_Noisy: \"\"\" An interface for objects providing", "float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type to its", "= float(os.environ.get(\"WASP_HOME_LON\", 172.582377)) IS_TESTING = os.environ.get(\"WASP_IS_TESTING\") #: dictionary mapping the C type to", "1, \"uint16\" : 2, \"int16\" : 2, \"uint32\" : 4, \"int32\" : 4,", "\"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class _Noisy: \"\"\" An interface for", "from the UAV to the groundstation * *settings.xml* - a concept of a", "a noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t =", "- a concept of a semi-persistant setting on the UAV. The setting may", "positive: self.offset = 1.0 else: self.offset = 0.0 #the noise is x percent", "system. This library can be used to create groundstation and other monitoring software", "NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\" def __init__(self, freq=1.0, amplitude=50.0,", "can be used to create groundstation and other monitoring software for interacting with", "<gh_stars>1-10 \"\"\" libwasp is a library for interacting with UAVs running the wasp", "* (t - self.t)) self.t = t val = (self.offset * math.sin(self.dt)) *", "a library for interacting with UAVs running the wasp software system. This library", ": \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class _Noisy: \"\"\" An", "the wasp software system. This library can be used to create groundstation and", "self.offset = 0.0 #the noise is x percent of the amplitude n =", "end self.start = start self.delta = delta self.type = value_type def value(self): v", ": \"%d\", \"DEBUG_FLOAT\" : \"%#f\" } class _Noisy: \"\"\" An interface for objects", "the UAV. 
The library is coupled with the onboard sofware througn * *messages.xml*", "\"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave \"\"\" def __init__(self,", "\"uint8\" : 1, \"int8\" : 1, \"uint16\" : 2, \"int16\" : 2, \"uint32\"", "int=float)) if self.start > self.end: if v > self.end and v < self.start:", "x percent of the amplitude n = (noise_pct/100.0) * self.amp self.n1 = self.amp", "__init__(self, freq=1.0, amplitude=50.0, value_type=float, positive=True, noise_pct=10): self.t = time.time() self.dt = 0.0 self.freq", "C type to correct format string TYPE_TO_PRINT_MAP = { float : \"%f\", str", "= delta self.type = value_type def value(self): return self.type(self.v + (self.delta * random.randrange(0.0,1.0,", "format string TYPE_TO_PRINT_MAP = { float : \"%f\", str : \"%s\", chr :", "chr : \"%c\", int : \"%d\" } ACID_ALL = 0xFF ACID_TEST = 0xFE", "self.type = value_type #add 1 to sine to keep +ve if positive: self.offset", "class _Noisy: \"\"\" An interface for objects providing noisy data (usually for testing)", "self.amp + n def value(self): t = time.time() self.dt += (self.freq * (t", "with the onboard sofware througn * *messages.xml* - the defintion of messages sent", "self.n2 = self.amp + n def value(self): t = time.time() self.dt += (self.freq", ": 4, } #: dictionary mapping the C type to correct format string", "semi-persistant setting on the UAV. The setting may be read/updated from the groundstation", "+ val) class NoisyWalk(_Noisy): \"\"\" Generates a noisy random walk \"\"\" def __init__(self,", "with UAVs running the wasp software system. This library can be used to", "sine to keep +ve if positive: self.offset = 1.0 else: self.offset = 0.0", "\"\"\" def value(self): \"\"\" :returns: the next value \"\"\" raise NotImplementedError class NoisySine(_Noisy):", "* *settings.xml* - a concept of a semi-persistant setting on the UAV. 
The", "is x percent of the amplitude n = (noise_pct/100.0) * self.amp self.n1 =", "self.amp = amplitude self.type = value_type #add 1 to sine to keep +ve", "library for interacting with UAVs running the wasp software system. This library can", "the defintion of messages sent over the chosen communication channel from the UAV", ": 2, \"int16\" : 2, \"uint32\" : 4, \"int32\" : 4, \"float\" :", "the next value \"\"\" raise NotImplementedError class NoisySine(_Noisy): \"\"\" Generates a noisy sinewave", "groundstation and other monitoring software for interacting with the UAV. The library is", ": 1, \"uint8\" : 1, \"int8\" : 1, \"uint16\" : 2, \"int16\" :", "groundstation * *settings.xml* - a concept of a semi-persistant setting on the UAV.", "of messages sent over the chosen communication channel from the UAV to the", "The setting may be read/updated from the groundstation and stored on the UAV", "self.n1 = self.amp - n self.n2 = self.amp + n def value(self): t", "ACID_TEST = 0xFE ACID_GROUNDSTATION = 0xFD #: dictionary mapping debug types to format", "\"int16\" : 2, \"uint32\" : 4, \"int32\" : 4, \"float\" : 4, }", "a semi-persistant setting on the UAV. The setting may be read/updated from the", "n def value(self): t = time.time() self.dt += (self.freq * (t - self.t))", "os import random import time import math HOME_LAT = float(os.environ.get(\"WASP_HOME_LAT\", -43.520451)) HOME_LON =", "return self.type(self.v) class Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v = value self.delta", "v return self.type(self.v) class Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v = value", "self.v = v return self.type(self.v) class Noisy(_Noisy): def __init__(self, value, delta, value_type=float): self.v", "\"int32\" : 4, \"float\" : 4, } #: dictionary mapping the C type", "monitoring software for interacting with the UAV. 
The library is coupled with the", "to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" : \"%d\", \"DEBUG_FLOAT\"", "software for interacting with the UAV. The library is coupled with the onboard", "defintion of messages sent over the chosen communication channel from the UAV to", "> self.end: if v > self.end and v < self.start: self.v = v", "> self.start: self.v = v return self.type(self.v) class Noisy(_Noisy): def __init__(self, value, delta,", "testing) \"\"\" def value(self): \"\"\" :returns: the next value \"\"\" raise NotImplementedError class", "debug types to format characters DEBUG_MESSAGES = { \"DEBUG_UINT8\" : \"%d\", \"DEBUG_INT32\" :", "random walk \"\"\" def __init__(self, start, end, delta, value_type=float): self.v = start self.end", "noisy random walk \"\"\" def __init__(self, start, end, delta, value_type=float): self.v = start" ]
[ "q = set(q) return q dataset_path = './data' dataset_name = 'cornell' dataset_path =", "search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q =", "set(q) return q dataset_path = './data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name)", "import networkx as nx import scipy.sparse as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj,", "q dataset_path = './data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj =", "import os import networkx as nx import scipy.sparse as sp def search_cycle(dir_adj): dir_adj", "os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj = dir_adj.tocsc() q = search_cycle(dir_adj) print(q)", "= os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj = dir_adj.tocsc() q = search_cycle(dir_adj)", "list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = [] for i in range(num_cycle): q.append(len(cycles[i])) q", "= set(q) return q dataset_path = './data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path,", "networkx as nx import scipy.sparse as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph)", "sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles)", "= 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj = dir_adj.tocsc()", "def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q", 
"nx import scipy.sparse as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles =", "cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = [] for i in range(num_cycle):", "'./data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj", "create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = [] for i in", "= './data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz'))", "for i in range(num_cycle): q.append(len(cycles[i])) q = set(q) return q dataset_path = './data'", "dataset_path = './data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path,", "= len(cycles) q = [] for i in range(num_cycle): q.append(len(cycles[i])) q = set(q)", "len(cycles) q = [] for i in range(num_cycle): q.append(len(cycles[i])) q = set(q) return", "dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = []", "as nx import scipy.sparse as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles", "q.append(len(cycles[i])) q = set(q) return q dataset_path = './data' dataset_name = 'cornell' dataset_path", "dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj =", "nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = [] for i", "= [] for i in range(num_cycle): q.append(len(cycles[i])) q = set(q) return q dataset_path", "= 
list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = [] for i in range(num_cycle): q.append(len(cycles[i]))", "os import networkx as nx import scipy.sparse as sp def search_cycle(dir_adj): dir_adj =", "[] for i in range(num_cycle): q.append(len(cycles[i])) q = set(q) return q dataset_path =", "import scipy.sparse as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj))", "'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj = dir_adj.tocsc() q", "as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle =", "in range(num_cycle): q.append(len(cycles[i])) q = set(q) return q dataset_path = './data' dataset_name =", "range(num_cycle): q.append(len(cycles[i])) q = set(q) return q dataset_path = './data' dataset_name = 'cornell'", "= nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle = len(cycles) q = [] for", "scipy.sparse as sp def search_cycle(dir_adj): dir_adj = nx.from_scipy_sparse_matrix(A=dir_adj, create_using=nx.DiGraph) cycles = list(nx.algorithms.cycles.simple_cycles(dir_adj)) num_cycle", "return q dataset_path = './data' dataset_name = 'cornell' dataset_path = os.path.join(dataset_path, dataset_name) dir_adj", "i in range(num_cycle): q.append(len(cycles[i])) q = set(q) return q dataset_path = './data' dataset_name", "dataset_path = os.path.join(dataset_path, dataset_name) dir_adj = sp.load_npz(os.path.join(dataset_path, 'adj.npz')) dir_adj = dir_adj.tocsc() q =", "num_cycle = len(cycles) q = [] for i in range(num_cycle): q.append(len(cycles[i])) q =", "q = [] for i in range(num_cycle): q.append(len(cycles[i])) q = set(q) return q" ]
[ "= [] for i,word in enumerate(words): for j in range(len(word)+1): a,b = word[:j],", "for j in range(m + 1): if isPalindrome(word, j, m - 1): leftId", "def find(w): return data.get(w, -1) ans = [] for i,word in enumerate(words): for", "if j and isPalindrome(b): k = find(a) if k != -1 and k", "i: ret.append([i, leftId]) if j and isPalindrome(word, 0, j - 1): rightId =", "List[List[int]]: def findWord(s: str, left: int, right: int) -> int: return indices.get(s[left:right+1], -1)", "def isPalindrome(s): return s == s[::-1] data = {word[::-1]:i for i,word in enumerate(words)}", "# print(f(words)) # words = [\"a\", \"\"] # print(f(words)) class Solution: def palindromePairs(self,", ": f.py # Description : # Author : # Creation Date : 2021-11-10", "ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words =", "def f(words): def isPalindrome(s): return s == s[::-1] data = {word[::-1]:i for i,word", "in range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a): k = find(b) if k", "word in enumerate(words): m = len(word) for j in range(m + 1): if", "List[str]) -> List[List[int]]: def findWord(s: str, left: int, right: int) -> int: return", "ret.append([i, leftId]) if j and isPalindrome(word, 0, j - 1): rightId = findWord(word,", "Description : # Author : # Creation Date : 2021-11-10 # Last Modified", "-> List[List[int]]: def findWord(s: str, left: int, right: int) -> int: return indices.get(s[left:right+1],", "k]) return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"]", "str, left: int, right: int) -> int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str,", "-1) ans = [] for i,word in enumerate(words): for j in range(len(word)+1): a,b", "= word[:j], word[j:] if isPalindrome(a): k = find(b) if k != -1 and", "[\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\", \"\"] # print(f(words)) class Solution: def", "# -*-coding:utf-8-*- # 
File Name : f.py # Description : # Author :", "# ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\",", "bool: return (sub := s[left:right+1]) == sub[::-1] n = len(words) indices = {word[::-1]:", "ret = list() for i, word in enumerate(words): m = len(word) for j", "find(b) if k != -1 and k != i: ans.append([k, i]) if j", "enumerate(words): m = len(word) for j in range(m + 1): if isPalindrome(word, j,", "Author : # Creation Date : 2021-11-10 # Last Modified : 2021年11月10日 星期三", "= len(words) indices = {word[::-1]: i for i, word in enumerate(words)} ret =", "!= -1 and k != i: ans.append([k, i]) if j and isPalindrome(b): k", "# Creation Date : 2021-11-10 # Last Modified : 2021年11月10日 星期三 06时56分48秒 #", "palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s: str, left: int, right: int) ->", "[] for i,word in enumerate(words): for j in range(len(word)+1): a,b = word[:j], word[j:]", "- 1) if leftId != -1 and leftId != i: ret.append([i, leftId]) if", "leftId != -1 and leftId != i: ret.append([i, leftId]) if j and isPalindrome(word,", "f.py # Description : # Author : # Creation Date : 2021-11-10 #", "j and isPalindrome(b): k = find(a) if k != -1 and k !=", "isPalindrome(word, j, m - 1): leftId = findWord(word, 0, j - 1) if", "k != i: ans.append([k, i]) if j and isPalindrome(b): k = find(a) if", "isPalindrome(s: str, left: int, right: int) -> bool: return (sub := s[left:right+1]) ==", "-> bool: return (sub := s[left:right+1]) == sub[::-1] n = len(words) indices =", "{word[::-1]:i for i,word in enumerate(words)} def find(w): return data.get(w, -1) ans = []", "[\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words =", "list() for i, word in enumerate(words): m = len(word) for j in range(m", "data.get(w, -1) ans = [] for i,word in enumerate(words): for j in range(len(word)+1):", "i: ans.append([k, i]) if j and isPalindrome(b): k 
= find(a) if k !=", ": 2021年11月10日 星期三 06时56分48秒 # Created By : lsl def f(words): def isPalindrome(s):", "m - 1): leftId = findWord(word, 0, j - 1) if leftId !=", "i, word in enumerate(words)} ret = list() for i, word in enumerate(words): m", "int, right: int) -> bool: return (sub := s[left:right+1]) == sub[::-1] n =", "int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int, right: int) -> bool:", "- 1): rightId = findWord(word, j, m - 1) if rightId != -1", "def findWord(s: str, left: int, right: int) -> int: return indices.get(s[left:right+1], -1) def", "k != -1 and k != i: ans.append([k, i]) if j and isPalindrome(b):", "ans = [] for i,word in enumerate(words): for j in range(len(word)+1): a,b =", "def palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s: str, left: int, right: int)", "words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\", \"\"] # print(f(words)) class", "for i,word in enumerate(words)} def find(w): return data.get(w, -1) ans = [] for", "in enumerate(words)} def find(w): return data.get(w, -1) ans = [] for i,word in", "left: int, right: int) -> int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left:", "Name : f.py # Description : # Author : # Creation Date :", "return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int, right: int) -> bool: return", "# Created By : lsl def f(words): def isPalindrome(s): return s == s[::-1]", "-> int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int, right: int) ->", "ans.append([k, i]) if j and isPalindrome(b): k = find(a) if k != -1", "words = [\"a\", \"\"] # print(f(words)) class Solution: def palindromePairs(self, words: List[str]) ->", "m = len(word) for j in range(m + 1): if isPalindrome(word, j, m", "len(words) indices = {word[::-1]: i for i, word in enumerate(words)} ret = list()", "i: ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) 
# words", "星期三 06时56分48秒 # Created By : lsl def f(words): def isPalindrome(s): return s", ":= s[left:right+1]) == sub[::-1] n = len(words) indices = {word[::-1]: i for i,", "lsl def f(words): def isPalindrome(s): return s == s[::-1] data = {word[::-1]:i for", "1): if isPalindrome(word, j, m - 1): leftId = findWord(word, 0, j -", "if j and isPalindrome(word, 0, j - 1): rightId = findWord(word, j, m", "k = find(a) if k != -1 and k != i: ans.append([i, k])", "# File Name : f.py # Description : # Author : # Creation", "return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] #", ": # Creation Date : 2021-11-10 # Last Modified : 2021年11月10日 星期三 06时56分48秒", "k = find(b) if k != -1 and k != i: ans.append([k, i])", "right: int) -> int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int, right:", "i,word in enumerate(words)} def find(w): return data.get(w, -1) ans = [] for i,word", ",\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\", \"\"]", "indices = {word[::-1]: i for i, word in enumerate(words)} ret = list() for", "== sub[::-1] n = len(words) indices = {word[::-1]: i for i, word in", "if k != -1 and k != i: ans.append([k, i]) if j and", "and isPalindrome(b): k = find(a) if k != -1 and k != i:", "# Description : # Author : # Creation Date : 2021-11-10 # Last", "word[:j], word[j:] if isPalindrome(a): k = find(b) if k != -1 and k", "print(f(words)) # words = [\"a\", \"\"] # print(f(words)) class Solution: def palindromePairs(self, words:", "for j in range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a): k = find(b)", "and leftId != i: ret.append([i, leftId]) if j and isPalindrome(word, 0, j -", "if isPalindrome(word, j, m - 1): leftId = findWord(word, 0, j - 1)", "1): rightId = findWord(word, j, m - 1) if rightId != -1 and", "Last Modified : 2021年11月10日 星期三 06时56分48秒 # Created By : lsl def f(words):", "# words = 
[\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\", \"\"] # print(f(words))", "if leftId != -1 and leftId != i: ret.append([i, leftId]) if j and", ": # Author : # Creation Date : 2021-11-10 # Last Modified :", "if k != -1 and k != i: ans.append([i, k]) return ans words", "Date : 2021-11-10 # Last Modified : 2021年11月10日 星期三 06时56分48秒 # Created By", "return data.get(w, -1) ans = [] for i,word in enumerate(words): for j in", "n = len(words) indices = {word[::-1]: i for i, word in enumerate(words)} ret", ": lsl def f(words): def isPalindrome(s): return s == s[::-1] data = {word[::-1]:i", "= [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words", "i for i, word in enumerate(words)} ret = list() for i, word in", "= {word[::-1]: i for i, word in enumerate(words)} ret = list() for i,", "findWord(word, 0, j - 1) if leftId != -1 and leftId != i:", "== s[::-1] data = {word[::-1]:i for i,word in enumerate(words)} def find(w): return data.get(w,", "- 1): leftId = findWord(word, 0, j - 1) if leftId != -1", "= findWord(word, j, m - 1) if rightId != -1 and rightId !=", "and k != i: ans.append([k, i]) if j and isPalindrome(b): k = find(a)", "s[left:right+1]) == sub[::-1] n = len(words) indices = {word[::-1]: i for i, word", "j in range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a): k = find(b) if", "for i,word in enumerate(words): for j in range(len(word)+1): a,b = word[:j], word[j:] if", "#!/usr/bin/env python # -*-coding:utf-8-*- # File Name : f.py # Description : #", "-1 and k != i: ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"] #", "len(word) for j in range(m + 1): if isPalindrome(word, j, m - 1):", "isPalindrome(a): k = find(b) if k != -1 and k != i: ans.append([k,", "in range(m + 1): if isPalindrome(word, j, m - 1): leftId = findWord(word,", "1) if rightId != -1 and rightId != i: ret.append([rightId, i]) return ret", "find(a) if k != -1 and k != i: ans.append([i, k]) 
return ans", "findWord(word, j, m - 1) if rightId != -1 and rightId != i:", "find(w): return data.get(w, -1) ans = [] for i,word in enumerate(words): for j", "= find(a) if k != -1 and k != i: ans.append([i, k]) return", "j, m - 1): leftId = findWord(word, 0, j - 1) if leftId", "File Name : f.py # Description : # Author : # Creation Date", "By : lsl def f(words): def isPalindrome(s): return s == s[::-1] data =", "ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words))", "\"\"] # print(f(words)) class Solution: def palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s:", "!= i: ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) #", "range(m + 1): if isPalindrome(word, j, m - 1): leftId = findWord(word, 0,", "i]) if j and isPalindrome(b): k = find(a) if k != -1 and", "python # -*-coding:utf-8-*- # File Name : f.py # Description : # Author", "data = {word[::-1]:i for i,word in enumerate(words)} def find(w): return data.get(w, -1) ans", "k != -1 and k != i: ans.append([i, k]) return ans words =", "!= i: ret.append([i, leftId]) if j and isPalindrome(word, 0, j - 1): rightId", "Modified : 2021年11月10日 星期三 06时56分48秒 # Created By : lsl def f(words): def", "enumerate(words)} def find(w): return data.get(w, -1) ans = [] for i,word in enumerate(words):", "sub[::-1] n = len(words) indices = {word[::-1]: i for i, word in enumerate(words)}", "words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) #", "1) if leftId != -1 and leftId != i: ret.append([i, leftId]) if j", "# words = [\"a\", \"\"] # print(f(words)) class Solution: def palindromePairs(self, words: List[str])", "print(f(words)) # words = [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\", \"\"] #", "[\"a\", \"\"] # print(f(words)) class Solution: def palindromePairs(self, words: List[str]) -> 
List[List[int]]: def", "int) -> bool: return (sub := s[left:right+1]) == sub[::-1] n = len(words) indices", "s == s[::-1] data = {word[::-1]:i for i,word in enumerate(words)} def find(w): return", "for i, word in enumerate(words): m = len(word) for j in range(m +", "(sub := s[left:right+1]) == sub[::-1] n = len(words) indices = {word[::-1]: i for", "!= -1 and leftId != i: ret.append([i, leftId]) if j and isPalindrome(word, 0,", "2021年11月10日 星期三 06时56分48秒 # Created By : lsl def f(words): def isPalindrome(s): return", "i, word in enumerate(words): m = len(word) for j in range(m + 1):", "enumerate(words): for j in range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a): k =", "06时56分48秒 # Created By : lsl def f(words): def isPalindrome(s): return s ==", "in enumerate(words)} ret = list() for i, word in enumerate(words): m = len(word)", "word in enumerate(words)} ret = list() for i, word in enumerate(words): m =", "j, m - 1) if rightId != -1 and rightId != i: ret.append([rightId,", "and k != i: ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"]", "+ 1): if isPalindrome(word, j, m - 1): leftId = findWord(word, 0, j", "j - 1) if leftId != -1 and leftId != i: ret.append([i, leftId])", "str, left: int, right: int) -> bool: return (sub := s[left:right+1]) == sub[::-1]", "= [\"bat\",\"tab\",\"cat\"] # print(f(words)) # words = [\"a\", \"\"] # print(f(words)) class Solution:", "int, right: int) -> int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int,", "= find(b) if k != -1 and k != i: ans.append([k, i]) if", "isPalindrome(b): k = find(a) if k != -1 and k != i: ans.append([i,", "Solution: def palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s: str, left: int, right:", "1): leftId = findWord(word, 0, j - 1) if leftId != -1 and", "if isPalindrome(a): k = find(b) if k != -1 and k != i:", "-1 and leftId != i: ret.append([i, leftId]) if j and isPalindrome(word, 0, j", 
"indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int, right: int) -> bool: return (sub", "-1 and k != i: ans.append([k, i]) if j and isPalindrome(b): k =", "def isPalindrome(s: str, left: int, right: int) -> bool: return (sub := s[left:right+1])", "range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a): k = find(b) if k !=", "right: int) -> bool: return (sub := s[left:right+1]) == sub[::-1] n = len(words)", "- 1) if rightId != -1 and rightId != i: ret.append([rightId, i]) return", "i,word in enumerate(words): for j in range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a):", "and isPalindrome(word, 0, j - 1): rightId = findWord(word, j, m - 1)", "!= -1 and k != i: ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"]", "{word[::-1]: i for i, word in enumerate(words)} ret = list() for i, word", "-*-coding:utf-8-*- # File Name : f.py # Description : # Author : #", "in enumerate(words): for j in range(len(word)+1): a,b = word[:j], word[j:] if isPalindrome(a): k", "= findWord(word, 0, j - 1) if leftId != -1 and leftId !=", "Created By : lsl def f(words): def isPalindrome(s): return s == s[::-1] data", "leftId != i: ret.append([i, leftId]) if j and isPalindrome(word, 0, j - 1):", "= {word[::-1]:i for i,word in enumerate(words)} def find(w): return data.get(w, -1) ans =", "return s == s[::-1] data = {word[::-1]:i for i,word in enumerate(words)} def find(w):", "= list() for i, word in enumerate(words): m = len(word) for j in", "rightId = findWord(word, j, m - 1) if rightId != -1 and rightId", "Creation Date : 2021-11-10 # Last Modified : 2021年11月10日 星期三 06时56分48秒 # Created", "return (sub := s[left:right+1]) == sub[::-1] n = len(words) indices = {word[::-1]: i", "left: int, right: int) -> bool: return (sub := s[left:right+1]) == sub[::-1] n", "class Solution: def palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s: str, left: int,", "a,b = word[:j], word[j:] if isPalindrome(a): k = find(b) if k != -1", 
"findWord(s: str, left: int, right: int) -> int: return indices.get(s[left:right+1], -1) def isPalindrome(s:", "k != i: ans.append([i, k]) return ans words = [\"abcd\",\"dcba\"] # ,\"lls\",\"s\",\"sssll\"] print(f(words))", "j - 1): rightId = findWord(word, j, m - 1) if rightId !=", "isPalindrome(s): return s == s[::-1] data = {word[::-1]:i for i,word in enumerate(words)} def", "!= i: ans.append([k, i]) if j and isPalindrome(b): k = find(a) if k", "# Author : # Creation Date : 2021-11-10 # Last Modified : 2021年11月10日", "2021-11-10 # Last Modified : 2021年11月10日 星期三 06时56分48秒 # Created By : lsl", "j and isPalindrome(word, 0, j - 1): rightId = findWord(word, j, m -", "m - 1) if rightId != -1 and rightId != i: ret.append([rightId, i])", "0, j - 1): rightId = findWord(word, j, m - 1) if rightId", "leftId = findWord(word, 0, j - 1) if leftId != -1 and leftId", "enumerate(words)} ret = list() for i, word in enumerate(words): m = len(word) for", "int) -> int: return indices.get(s[left:right+1], -1) def isPalindrome(s: str, left: int, right: int)", "word[j:] if isPalindrome(a): k = find(b) if k != -1 and k !=", "# print(f(words)) class Solution: def palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s: str,", "print(f(words)) class Solution: def palindromePairs(self, words: List[str]) -> List[List[int]]: def findWord(s: str, left:", "isPalindrome(word, 0, j - 1): rightId = findWord(word, j, m - 1) if", "for i, word in enumerate(words)} ret = list() for i, word in enumerate(words):", "# Last Modified : 2021年11月10日 星期三 06时56分48秒 # Created By : lsl def", "j in range(m + 1): if isPalindrome(word, j, m - 1): leftId =", "leftId]) if j and isPalindrome(word, 0, j - 1): rightId = findWord(word, j,", "words: List[str]) -> List[List[int]]: def findWord(s: str, left: int, right: int) -> int:", "-1) def isPalindrome(s: str, left: int, right: int) -> bool: return (sub :=", "= [\"a\", \"\"] # print(f(words)) class Solution: def palindromePairs(self, words: 
List[str]) -> List[List[int]]:", ": 2021-11-10 # Last Modified : 2021年11月10日 星期三 06时56分48秒 # Created By :", "in enumerate(words): m = len(word) for j in range(m + 1): if isPalindrome(word,", "= len(word) for j in range(m + 1): if isPalindrome(word, j, m -", "0, j - 1) if leftId != -1 and leftId != i: ret.append([i,", "s[::-1] data = {word[::-1]:i for i,word in enumerate(words)} def find(w): return data.get(w, -1)", "f(words): def isPalindrome(s): return s == s[::-1] data = {word[::-1]:i for i,word in" ]
[ "# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK", "of this resource. A hash of the contents stored in this object. This", "when the resource is created. The name must be 1-63 characters long, and", "not edit by hand unless you're certain you know what you are doing!", "to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str:", "name must be 1-63 characters long, and comply with RFC1035. Specifically, the name", "self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] = None, region: Optional[str] =", "self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def", "raise TypeError(\"Expected argument 'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link", "settable as a field in the request body. \"\"\" return pulumi.get(self, \"region\") @property", "\"fingerprint\", fingerprint) if kind and not isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to", "to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not isinstance(kind, str):", "] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None,", "pulumi.get(self, \"name\") @property @pulumi.getter def region(self) -> str: \"\"\" URL of the region", "pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ... 
import", "__args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region'] = region if opts is None:", "must be 1-63 characters long, and comply with RFC1035. Specifically, the name must", "be a lowercase letter, and all following characters must be a dash, lowercase", "self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to", "must be a dash, lowercase letter, or digit, except the last character, which", "= None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\" Gets a specified NetworkEdgeSecurityService.", "*** # *** Do not edit by hand unless you're certain you know", "raise TypeError(\"Expected argument 'kind' to be a str\") pulumi.set(__self__, \"kind\", kind) if name", "stored in this object. This field is used in optimistic locking. This field", "the first character must be a lowercase letter, and all following characters must", "str): raise TypeError(\"Expected argument 'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if", "None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\" Gets", "and not isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to be a str\") pulumi.set(__self__,", "not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\",", "kind(self) -> str: return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) -> str: \"\"\"", "isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\", security_policy)", "str): raise TypeError(\"Expected argument 'region' to be a str\") 
pulumi.set(__self__, \"region\", region) if", "and not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to be a str\") pulumi.set(__self__,", "kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project:", "resource URL for the network edge security service associated with this network edge", "a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not isinstance(kind, str): raise TypeError(\"Expected", "get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] = None, region: Optional[str] = None, opts:", "None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ =", "\"\"\" An optional description of this resource. Provide this property when you create", "str): raise TypeError(\"Expected argument 'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if", "return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) -> str: return pulumi.get(self, \"kind\") @property", "field will be ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must be", "for the network edge security service associated with this network edge security service.", "def description(self) -> str: \"\"\" An optional description of this resource. Provide this", "Provided by the client when the resource is created. 
The name must be", "if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to be a", "Optional[str] = None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult:", "last character, which cannot be a dash. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter", "to retrieve a NetworkEdgeSecurityService. \"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) ->", "kind) if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be", "@property @pulumi.getter def description(self) -> str: \"\"\" An optional description of this resource.", "'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None,", "return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def", "not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\",", "request URL. 
It is not settable as a field in the request body.", "\"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return", "pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The resource URL for", "\"kind\", kind) if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to", "if kind and not isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to be a", "lowercase letter, or digit, except the last character, which cannot be a dash.", "Pulumi SDK Generator. *** # *** Do not edit by hand unless you're", "... import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class", "letter, or digit, except the last character, which cannot be a dash. \"\"\"", "AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp,", "the NetworkEdgeSecurityService, otherwise the request will fail with error 412 conditionNotMet. To see", "a str\") pulumi.set(__self__, \"region\", region) if security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected", "str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if", "make a get() request to retrieve a NetworkEdgeSecurityService. \"\"\" return pulumi.get(self, \"fingerprint\") @property", "-> str: \"\"\" Name of the resource. Provided by the client when the", "retrieve a NetworkEdgeSecurityService. 
\"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) -> str:", "region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a", "yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id)", "is not settable as a field in the request body. \"\"\" return pulumi.get(self,", "False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link,", "'kind' to be a str\") pulumi.set(__self__, \"kind\", kind) if name and not isinstance(name,", "'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) ->", "@property @pulumi.getter def kind(self) -> str: return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self)", "opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. 
\"\"\" __args__", "__args__['region'] = region if opts is None: opts = pulumi.InvokeOptions() if opts.version is", "creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]]", "isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name)", "URL for the network edge security service associated with this network edge security", "a specified NetworkEdgeSecurityService. \"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project", "conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a", "name) if region and not isinstance(region, str): raise TypeError(\"Expected argument 'region' to be", "\"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The resource", "coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator.", "str: \"\"\" URL of the region where the resource resides. You must specify", "resides. You must specify this field as part of the HTTP request URL.", "network edge security service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) ->", "security service associated with this network edge security service. 
\"\"\" return pulumi.get(self, \"security_policy\")", "\"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if False:", "for the resource. \"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str:", "resource. A hash of the contents stored in this object. This field is", "= network_edge_security_service __args__['project'] = project __args__['region'] = region if opts is None: opts", "= dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region'] = region if opts", "= None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService.", "not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\",", "must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means", "Fingerprint of this resource. A hash of the contents stored in this object.", "pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined URL for this", "specified NetworkEdgeSecurityService. \"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region']", "raise TypeError(\"Expected argument 'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description", "self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation timestamp in RFC3339 text", "the resource resides. 
You must specify this field as part of the HTTP", "raise TypeError(\"Expected argument 'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\")", "the request will fail with error 412 conditionNotMet. To see the latest fingerprint,", "str): raise TypeError(\"Expected argument 'kind' to be a str\") pulumi.set(__self__, \"kind\", kind) if", "generated by the Pulumi SDK Generator. *** # *** Do not edit by", "NetworkEdgeSecurityService, otherwise the request will fail with error 412 conditionNotMet. To see the", "def fingerprint(self) -> str: \"\"\" Fingerprint of this resource. A hash of the", "region if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version", "opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description,", "be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not isinstance(self_link, str): raise", "will fail with error 412 conditionNotMet. To see the latest fingerprint, make a", "opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version()", "GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str]", "the HTTP request URL. 
It is not settable as a field in the", "and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be", "str\") pulumi.set(__self__, \"region\", region) if security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected argument", "TypeError(\"Expected argument 'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def", "must be a lowercase letter, and all following characters must be a dash,", "@pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined URL for this resource with the", "-> str: \"\"\" Server-defined URL for the resource. \"\"\" return pulumi.get(self, \"self_link\") @property", "return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) -> str: \"\"\" Name of the", "__await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region,", "be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not isinstance(kind, str): raise", "was generated by the Pulumi SDK Generator. *** # *** Do not edit", "fingerprint must be provided in order to update the NetworkEdgeSecurityService, otherwise the request", "a NetworkEdgeSecurityService. 
An up-to-date fingerprint must be provided in order to update the", "\"name\") @property @pulumi.getter def region(self) -> str: \"\"\" URL of the region where", "'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None,", "`[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and all", "and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to be a str\") pulumi.set(__self__,", "\"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined URL for the resource.", "The name must be 1-63 characters long, and comply with RFC1035. Specifically, the", "kind and not isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to be a str\")", "of the resource. Provided by the client when the resource is created. The", "this network edge security service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self)", "will be ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must be provided", "a field in the request body. \"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def", "a str\") pulumi.set(__self__, \"kind\", kind) if name and not isinstance(name, str): raise TypeError(\"Expected", "not isinstance(region, str): raise TypeError(\"Expected argument 'region' to be a str\") pulumi.set(__self__, \"region\",", "as a field in the request body. 
\"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\")", "raise TypeError(\"Expected argument 'self_link' to be a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id", "description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp,", "expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter, and", "and not isinstance(region, str): raise TypeError(\"Expected argument 'region' to be a str\") pulumi.set(__self__,", "property when you create the resource. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def", "to be a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id, str):", "__args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link,", "\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint of", "by the Pulumi SDK Generator. *** # *** Do not edit by hand", "you're certain you know what you are doing! *** import warnings import pulumi", "@pulumi.getter def region(self) -> str: \"\"\" URL of the region where the resource", "Server-defined URL for this resource with the resource id. \"\"\" return pulumi.get(self, \"self_link_with_id\")", "def kind(self) -> str: return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) -> str:", "unless you're certain you know what you are doing! 
*** import warnings import", "if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a", "Name of the resource. Provided by the client when the resource is created.", "create the resource. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) -> str:", "\"\"\" URL of the region where the resource resides. You must specify this", "be a str\") pulumi.set(__self__, \"kind\", kind) if name and not isinstance(name, str): raise", "contents stored in this object. This field is used in optimistic locking. This", "description of this resource. Provide this property when you create the resource. \"\"\"", "return pulumi.get(self, \"name\") @property @pulumi.getter def region(self) -> str: \"\"\" URL of the", "RFC1035. Specifically, the name must be 1-63 characters long and match the regular", "\"\"\" Server-defined URL for this resource with the resource id. \"\"\" return pulumi.get(self,", "Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\" Gets a specified", "TypeError(\"Expected argument 'region' to be a str\") pulumi.set(__self__, \"region\", region) if security_policy and", "def creation_timestamp(self) -> str: \"\"\" Creation timestamp in RFC3339 text format. \"\"\" return", "fingerprint, make a get() request to retrieve a NetworkEdgeSecurityService. \"\"\" return pulumi.get(self, \"fingerprint\")", "def __await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name,", "str\") pulumi.set(__self__, \"name\", name) if region and not isinstance(region, str): raise TypeError(\"Expected argument", "the request body. 
\"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str:", "\"creation_timestamp\", creation_timestamp) if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to", "@property @pulumi.getter def name(self) -> str: \"\"\" Name of the resource. Provided by", "'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None,", "-> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] =", "pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description'", "used in optimistic locking. This field will be ignored when inserting a NetworkEdgeSecurityService.", "'name' to be a str\") pulumi.set(__self__, \"name\", name) if region and not isinstance(region,", "security_policy(self) -> str: \"\"\" The resource URL for the network edge security service", "None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets", "pulumi.set(__self__, \"region\", region) if security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy'", "text format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) -> str: \"\"\"", "\"\"\" Fingerprint of this resource. A hash of the contents stored in this", "'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not isinstance(description,", "resource with the resource id. 
\"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint:", "kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str): raise", "by hand unless you're certain you know what you are doing! *** import", "TypeError(\"Expected argument 'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and", "pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined URL for the", "*** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional,", "URL for the resource. \"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) ->", "for this resource with the resource id. \"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult):", "you are doing! *** import warnings import pulumi import pulumi.runtime from typing import", "a dash. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def region(self) -> str: \"\"\"", "-> str: \"\"\" The resource URL for the network edge security service associated", "pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) -> str: \"\"\" Name of the resource.", "To see the latest fingerprint, make a get() request to retrieve a NetworkEdgeSecurityService.", "overload from ... 
import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ]", "str: \"\"\" The resource URL for the network edge security service associated with", "-> str: \"\"\" Server-defined URL for this resource with the resource id. \"\"\"", "Optional, Sequence, Union, overload from ... import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult',", "and all following characters must be a dash, lowercase letter, or digit, except", "'self_link' to be a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id,", "isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint)", "\"region\", region) if security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to", "AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service:", "description(self) -> str: \"\"\" An optional description of this resource. 
Provide this property", "and not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to be a str\") pulumi.set(__self__,", "creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] =", "latest fingerprint, make a get() request to retrieve a NetworkEdgeSecurityService. \"\"\" return pulumi.get(self,", "get() request to retrieve a NetworkEdgeSecurityService. \"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def", "typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service)", "isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp)", "the region where the resource resides. You must specify this field as part", "# *** Do not edit by hand unless you're certain you know what", "'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not isinstance(kind,", "in optimistic locking. This field will be ignored when inserting a NetworkEdgeSecurityService. 
An", "be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the", "None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp,", "pulumi.set(__self__, \"description\", description) if fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint'", "be a str\") pulumi.set(__self__, \"region\", region) if security_policy and not isinstance(security_policy, str): raise", "*** Do not edit by hand unless you're certain you know what you", "must specify this field as part of the HTTP request URL. It is", "from ... import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type", "a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not isinstance(self_link, str): raise TypeError(\"Expected", "pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy,", "or digit, except the last character, which cannot be a dash. \"\"\" return", "str: \"\"\" Server-defined URL for the resource. \"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\")", "This field is used in optimistic locking. This field will be ignored when", "timestamp in RFC3339 text format. 
\"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self)", "__ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name,", "\"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) -> str: return pulumi.get(self, \"kind\")", "id. \"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if", "URL for this resource with the resource id. \"\"\" return pulumi.get(self, \"self_link_with_id\") class", "is used in optimistic locking. This field will be ignored when inserting a", "pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link'", "import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities __all__ =", "lowercase letter, and all following characters must be a dash, lowercase letter, or", "URL of the region where the resource resides. You must specify this field", "[ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None,", "None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\"", "inserting a NetworkEdgeSecurityService. 
An up-to-date fingerprint must be provided in order to update", "the last character, which cannot be a dash. \"\"\" return pulumi.get(self, \"name\") @property", "@property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined URL for this resource with", "network_edge_security_service __args__['project'] = project __args__['region'] = region if opts is None: opts =", "isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description)", "region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\" Gets a", "fail with error 412 conditionNotMet. To see the latest fingerprint, make a get()", "if region and not isinstance(region, str): raise TypeError(\"Expected argument 'region' to be a", "Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]:", "'description' to be a str\") pulumi.set(__self__, \"description\", description) if fingerprint and not isinstance(fingerprint,", "self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] = None, region: Optional[str]", "of the contents stored in this object. This field is used in optimistic", "NetworkEdgeSecurityService. \"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) -> str: return pulumi.get(self,", "None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\"", "request will fail with error 412 conditionNotMet. To see the latest fingerprint, make", "resource resides. 
You must specify this field as part of the HTTP request", "None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" __args__ = dict() __args__['networkEdgeSecurityService']", "@property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined URL for the resource. \"\"\"", "edit by hand unless you're certain you know what you are doing! ***", "and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__,", "\"\"\" Name of the resource. Provided by the client when the resource is", "TypeError(\"Expected argument 'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and", "argument 'region' to be a str\") pulumi.set(__self__, \"region\", region) if security_policy and not", "if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value", "\"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to", "pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) -> str: return pulumi.get(self, \"kind\") @property @pulumi.getter", "specify this field as part of the HTTP request URL. It is not", "str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation timestamp", "is created. The name must be 1-63 characters long, and comply with RFC1035.", "Specifically, the name must be 1-63 characters long and match the regular expression", "\"description\", description) if fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to", "resource id. 
\"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self):", "opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return", "self_link) if self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to be", "provided in order to update the NetworkEdgeSecurityService, otherwise the request will fail with", "SDK Generator. *** # *** Do not edit by hand unless you're certain", "fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str):", "when you create the resource. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self)", "and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__,", "a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not isinstance(description, str): raise TypeError(\"Expected", "client when the resource is created. The name must be 1-63 characters long,", "TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if fingerprint and", "not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\",", "argument 'self_link' to be a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not", "part of the HTTP request URL. 
It is not settable as a field", "__all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__,", "creation_timestamp) if description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be", "first character must be a lowercase letter, and all following characters must be", "with error 412 conditionNotMet. To see the latest fingerprint, make a get() request", "the resource is created. The name must be 1-63 characters long, and comply", "associated with this network edge security service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\")", "to be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not isinstance(self_link, str):", "service associated with this network edge security service. \"\"\" return pulumi.get(self, \"security_policy\") @property", "doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping,", "to update the NetworkEdgeSecurityService, otherwise the request will fail with error 412 conditionNotMet.", "edge security service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str:", "\"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project']", "1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63", "object. This field is used in optimistic locking. 
This field will be ignored", "name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which", "= None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] =", "name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]]", "this field as part of the HTTP request URL. It is not settable", "and comply with RFC1035. Specifically, the name must be 1-63 characters long and", "fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None,", "a dash, lowercase letter, or digit, except the last character, which cannot be", "name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] =", "-> str: \"\"\" Creation timestamp in RFC3339 text format. \"\"\" return pulumi.get(self, \"creation_timestamp\")", "region) if security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to be", "resource. Provided by the client when the resource is created. The name must", "character, which cannot be a dash. 
\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def", "name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a str\")", "fingerprint(self) -> str: \"\"\" Fingerprint of this resource. A hash of the contents", "field is used in optimistic locking. This field will be ignored when inserting", "Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities __all__ = [", "__args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region'] = region if", "the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase", "description) if fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to be", "in this object. This field is used in optimistic locking. This field will", "field in the request body. \"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self)", "@pulumi.getter def description(self) -> str: \"\"\" An optional description of this resource. Provide", "pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint of this resource.", "locking. This field will be ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint", "the resource id. \"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def", "fingerprint) if kind and not isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to be", "Creation timestamp in RFC3339 text format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def", "this property when you create the resource. 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter", "# pylint: disable=using-constant-test def __await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description,", "_utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def", "kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str]", "isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to be a str\") pulumi.set(__self__, \"kind\", kind)", "body. \"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The", "of the region where the resource resides. You must specify this field as", "An up-to-date fingerprint must be provided in order to update the NetworkEdgeSecurityService, otherwise", "up-to-date fingerprint must be provided in order to update the NetworkEdgeSecurityService, otherwise the", "if opts is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version =", "a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation", "field as part of the HTTP request URL. It is not settable as", "to be a str\") pulumi.set(__self__, \"kind\", kind) if name and not isinstance(name, str):", "def region(self) -> str: \"\"\" URL of the region where the resource resides.", "str: \"\"\" Fingerprint of this resource. A hash of the contents stored in", "resource. 
\"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined", "It is not settable as a field in the request body. \"\"\" return", "import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence,", "argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if region and not", "a NetworkEdgeSecurityService. \"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self) -> str: return", "created. The name must be 1-63 characters long, and comply with RFC1035. Specifically,", "disable=using-constant-test def __await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind,", "@_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] =", "raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if fingerprint", "if security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to be a", "pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts,", "the name must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`", "pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation timestamp in", "\"fingerprint\") @property @pulumi.getter def kind(self) -> str: return pulumi.get(self, 
\"kind\") @property @pulumi.getter def", "TypeError(\"Expected argument 'kind' to be a str\") pulumi.set(__self__, \"kind\", kind) if name and", "str): raise TypeError(\"Expected argument 'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if", "-> str: return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) -> str: \"\"\" Name", "dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region'] = region if opts is", "def self_link(self) -> str: \"\"\" Server-defined URL for the resource. \"\"\" return pulumi.get(self,", "str: \"\"\" Server-defined URL for this resource with the resource id. \"\"\" return", "this resource. Provide this property when you create the resource. \"\"\" return pulumi.get(self,", "characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character", "warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union,", "be a dash. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def region(self) -> str:", "str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not isinstance(self_link, str): raise TypeError(\"Expected argument", "security service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\"", "Gets a specified NetworkEdgeSecurityService. 
\"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] =", "'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not isinstance(self_link,", "and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to be a str\") pulumi.set(__self__,", "return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint of this", "regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a lowercase letter,", "self_link and not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to be a str\")", "raise TypeError(\"Expected argument 'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind", "otherwise the request will fail with error 412 conditionNotMet. To see the latest", "self_link_with_id(self) -> str: \"\"\" Server-defined URL for this resource with the resource id.", "None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None)", "not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to be a str\") pulumi.set(__self__, \"self_link\",", "cannot be a dash. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def region(self) ->", "URL. It is not settable as a field in the request body. \"\"\"", "\"creation_timestamp\") @property @pulumi.getter def description(self) -> str: \"\"\" An optional description of this", "typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities __all__", "project: Optional[str] = None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) ->", "hash of the contents stored in this object. 
This field is used in", "TypeError(\"Expected argument 'self_link' to be a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and", "description and not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\")", "this resource. A hash of the contents stored in this object. This field", "a str\") pulumi.set(__self__, \"name\", name) if region and not isinstance(region, str): raise TypeError(\"Expected", "@property @pulumi.getter def region(self) -> str: \"\"\" URL of the region where the", "pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) -> str: \"\"\" An optional description of", "resource. Provide this property when you create the resource. \"\"\" return pulumi.get(self, \"description\")", "project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) ->", "Server-defined URL for the resource. \"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self)", "raise TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if region", "format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) -> str: \"\"\" An", "region and not isinstance(region, str): raise TypeError(\"Expected argument 'region' to be a str\")", "\"description\") @property @pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint of this resource. A", "dash. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def region(self) -> str: \"\"\" URL", "certain you know what you are doing! *** import warnings import pulumi import", "isinstance(region, str): raise TypeError(\"Expected argument 'region' to be a str\") pulumi.set(__self__, \"region\", region)", "@pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined URL for the resource. 
\"\"\" return", "ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must be provided in order", "HTTP request URL. It is not settable as a field in the request", "@pulumi.getter def name(self) -> str: \"\"\" Name of the resource. Provided by the", "be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\"", "Generator. *** # *** Do not edit by hand unless you're certain you", "the Pulumi SDK Generator. *** # *** Do not edit by hand unless", "optional description of this resource. Provide this property when you create the resource.", "update the NetworkEdgeSecurityService, otherwise the request will fail with error 412 conditionNotMet. To", "You must specify this field as part of the HTTP request URL. It", "argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if fingerprint and not", "the client when the resource is created. The name must be 1-63 characters", "region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument", "the resource. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) -> str: \"\"\"", "and not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to be a str\") pulumi.set(__self__,", "str): raise TypeError(\"Expected argument 'self_link' to be a str\") pulumi.set(__self__, \"self_link\", self_link) if", "\"\"\" The resource URL for the network edge security service associated with this", "str: \"\"\" Name of the resource. 
Provided by the client when the resource", "'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None,", "security_policy) if self_link and not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to be", "import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from ...", "pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload from", "= pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region,", "The resource URL for the network edge security service associated with this network", "str: \"\"\" An optional description of this resource. Provide this property when you", "see the latest fingerprint, make a get() request to retrieve a NetworkEdgeSecurityService. \"\"\"", "= None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\"", "@property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation timestamp in RFC3339 text format.", "project __args__['region'] = region if opts is None: opts = pulumi.InvokeOptions() if opts.version", "a str\") pulumi.set(__self__, \"description\", description) if fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected", "\"kind\") @property @pulumi.getter def name(self) -> str: \"\"\" Name of the resource. 
Provided", "order to update the NetworkEdgeSecurityService, otherwise the request will fail with error 412", "\"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined URL", "name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected", "character must be a lowercase letter, and all following characters must be a", "the latest fingerprint, make a get() request to retrieve a NetworkEdgeSecurityService. \"\"\" return", "Optional[str] = None, project: Optional[str] = None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions]", "self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to be a str\")", "which means the first character must be a lowercase letter, and all following", "to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not isinstance(description, str):", "resource. \"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint", "letter, and all following characters must be a dash, lowercase letter, or digit,", "from typing import Any, Mapping, Optional, Sequence, Union, overload from ... import _utilities", "know what you are doing! 
*** import warnings import pulumi import pulumi.runtime from", "not isinstance(kind, str): raise TypeError(\"Expected argument 'kind' to be a str\") pulumi.set(__self__, \"kind\",", "str\") pulumi.set(__self__, \"kind\", kind) if name and not isinstance(name, str): raise TypeError(\"Expected argument", "fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project:", "in order to update the NetworkEdgeSecurityService, otherwise the request will fail with error", "None, project: Optional[str] = None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None)", "\"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined URL for this resource", "Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified", "in RFC3339 text format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) ->", "str: return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) -> str: \"\"\" Name of", "the resource. Provided by the client when the resource is created. The name", "@pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None,", "be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not isinstance(description, str): raise", "with the resource id. 
\"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test", "def security_policy(self) -> str: \"\"\" The resource URL for the network edge security", "which cannot be a dash. \"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def region(self)", "self_link(self) -> str: \"\"\" Server-defined URL for the resource. \"\"\" return pulumi.get(self, \"self_link\")", "not settable as a field in the request body. \"\"\" return pulumi.get(self, \"region\")", "if self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to be a", "by the client when the resource is created. The name must be 1-63", "be 1-63 characters long, and comply with RFC1035. Specifically, the name must be", "An optional description of this resource. Provide this property when you create the", "class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult(", "following characters must be a dash, lowercase letter, or digit, except the last", "a get() request to retrieve a NetworkEdgeSecurityService. 
\"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter", "\"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation timestamp in RFC3339", "str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\", description) if", "security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp'", "__init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and", "pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id'", "creation_timestamp(self) -> str: \"\"\" Creation timestamp in RFC3339 text format. \"\"\" return pulumi.get(self,", "request to retrieve a NetworkEdgeSecurityService. 
\"\"\" return pulumi.get(self, \"fingerprint\") @property @pulumi.getter def kind(self)", "characters must be a dash, lowercase letter, or digit, except the last character,", "GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if", "isinstance(self_link_with_id, str): raise TypeError(\"Expected argument 'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id)", "TypeError(\"Expected argument 'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and", "raise TypeError(\"Expected argument 'region' to be a str\") pulumi.set(__self__, \"region\", region) if security_policy", "return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if False: yield", "WARNING: this file was generated by the Pulumi SDK Generator. *** # ***", "\"\"\" return pulumi.get(self, \"name\") @property @pulumi.getter def region(self) -> str: \"\"\" URL of", "= region if opts is None: opts = pulumi.InvokeOptions() if opts.version is None:", "dash, lowercase letter, or digit, except the last character, which cannot be a", "edge security service associated with this network edge security service. \"\"\" return pulumi.get(self,", "\"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined URL", "= None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" __args__ = dict()", "where the resource resides. 
You must specify this field as part of the", "= _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint,", "if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy,", "str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not isinstance(kind, str): raise TypeError(\"Expected argument", "of this resource. Provide this property when you create the resource. \"\"\" return", "the resource. \"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\"", "to be a str\") pulumi.set(__self__, \"region\", region) if security_policy and not isinstance(security_policy, str):", "Do not edit by hand unless you're certain you know what you are", "1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first", "argument 'security_policy' to be a str\") pulumi.set(__self__, \"security_policy\", security_policy) if self_link and not", "= [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None,", "pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): # pylint: disable=using-constant-test def __await__(self): if False: yield self", "is None: opts.version = 
_utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult(", "This field will be ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must", "@pulumi.getter def kind(self) -> str: return pulumi.get(self, \"kind\") @property @pulumi.getter def name(self) ->", "_utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__, opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind,", "def self_link_with_id(self) -> str: \"\"\" Server-defined URL for this resource with the resource", "\"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) -> str: \"\"\" An optional", "long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must", "region(self) -> str: \"\"\" URL of the region where the resource resides. You", "are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any,", "characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters", "return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service:", "error 412 conditionNotMet. 
To see the latest fingerprint, make a get() request to", "AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service", "is None: opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__", "not isinstance(description, str): raise TypeError(\"Expected argument 'description' to be a str\") pulumi.set(__self__, \"description\",", "self_link_with_id=None): if creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to be", "'region' to be a str\") pulumi.set(__self__, \"region\", region) if security_policy and not isinstance(security_policy,", "<reponame>AaronFriel/pulumi-google-native # coding=utf-8 # *** WARNING: this file was generated by the Pulumi", "\"\"\" Creation timestamp in RFC3339 text format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter", "\"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region'] = region", "= pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService', __args__,", "match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first character must be a", "@property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The resource URL for the network", "be ignored when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must be provided in", "be a str\") pulumi.set(__self__, \"description\", description) if fingerprint and not isinstance(fingerprint, str): raise", "file was generated by the Pulumi SDK Generator. *** # *** Do not", "comply with RFC1035. 
Specifically, the name must be 1-63 characters long and match", "fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to be a str\")", "digit, except the last character, which cannot be a dash. \"\"\" return pulumi.get(self,", "str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected argument", "means the first character must be a lowercase letter, and all following characters", "\"name\", name) if region and not isinstance(region, str): raise TypeError(\"Expected argument 'region' to", "Provide this property when you create the resource. \"\"\" return pulumi.get(self, \"description\") @property", "all following characters must be a dash, lowercase letter, or digit, except the", "of the HTTP request URL. It is not settable as a field in", "security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] = None, region:", "def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] = None, region: Optional[str] = None,", "this resource with the resource id. \"\"\" return pulumi.get(self, \"self_link_with_id\") class AwaitableGetNetworkEdgeSecurityServiceResult(GetNetworkEdgeSecurityServiceResult): #", "pylint: disable=using-constant-test def __await__(self): if False: yield self return GetNetworkEdgeSecurityServiceResult( creation_timestamp=self.creation_timestamp, description=self.description, fingerprint=self.fingerprint,", "-> str: \"\"\" URL of the region where the resource resides. 
You must", "be a str\") pulumi.set(__self__, \"name\", name) if region and not isinstance(region, str): raise", "if self_link and not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to be a", "str\") pulumi.set(__self__, \"description\", description) if fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected argument", "security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None,", "a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id, str): raise TypeError(\"Expected", "get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None, opts:", "be a str\") pulumi.set(__self__, \"self_link\", self_link) if self_link_with_id and not isinstance(self_link_with_id, str): raise", "optimistic locking. This field will be ignored when inserting a NetworkEdgeSecurityService. An up-to-date", "creation_timestamp and not isinstance(creation_timestamp, str): raise TypeError(\"Expected argument 'creation_timestamp' to be a str\")", "def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None,", "return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined URL for", "-> str: \"\"\" Fingerprint of this resource. 
A hash of the contents stored", "pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not isinstance(kind, str): raise TypeError(\"Expected argument 'kind'", "to be a str\") pulumi.set(__self__, \"name\", name) if region and not isinstance(region, str):", "\"security_policy\", security_policy) if self_link and not isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to", "with RFC1035. Specifically, the name must be 1-63 characters long and match the", "opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetNetworkEdgeSecurityServiceResult]: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" ...", "this file was generated by the Pulumi SDK Generator. *** # *** Do", "self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, region:", "Union, overload from ... import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output',", "= None, project: Optional[str] = None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] =", "hand unless you're certain you know what you are doing! *** import warnings", "argument 'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property @pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self)", "argument 'kind' to be a str\") pulumi.set(__self__, \"kind\", kind) if name and not", "must be provided in order to update the NetworkEdgeSecurityService, otherwise the request will", "@pulumi.getter(name=\"creationTimestamp\") def creation_timestamp(self) -> str: \"\"\" Creation timestamp in RFC3339 text format. 
\"\"\"", "class GetNetworkEdgeSecurityServiceResult: def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None):", "return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The resource URL", "Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\" Gets a specified NetworkEdgeSecurityService. \"\"\" __args__ =", "the network edge security service associated with this network edge security service. \"\"\"", "if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name' to be a", "the contents stored in this object. This field is used in optimistic locking.", "if fingerprint and not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to be a", "security_policy and not isinstance(security_policy, str): raise TypeError(\"Expected argument 'security_policy' to be a str\")", "a lowercase letter, and all following characters must be a dash, lowercase letter,", "Mapping, Optional, Sequence, Union, overload from ... import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult',", "region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] =", "import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service', 'get_network_edge_security_service_output', ] @pulumi.output_type class GetNetworkEdgeSecurityServiceResult:", "you create the resource. 
\"\"\" return pulumi.get(self, \"description\") @property @pulumi.getter def fingerprint(self) ->", "name(self) -> str: \"\"\" Name of the resource. Provided by the client when", "pulumi.set(__self__, \"kind\", kind) if name and not isinstance(name, str): raise TypeError(\"Expected argument 'name'", "\"\"\" Server-defined URL for the resource. \"\"\" return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def", "str): raise TypeError(\"Expected argument 'self_link_with_id' to be a str\") pulumi.set(__self__, \"self_link_with_id\", self_link_with_id) @property", "Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]] = None, opts: Optional[pulumi.InvokeOptions]", "service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def self_link(self) -> str: \"\"\" Server-defined", "this object. This field is used in optimistic locking. This field will be", "opts=opts, typ=GetNetworkEdgeSecurityServiceResult).value return AwaitableGetNetworkEdgeSecurityServiceResult( creation_timestamp=__ret__.creation_timestamp, description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id)", "region where the resource resides. You must specify this field as part of", "TypeError(\"Expected argument 'name' to be a str\") pulumi.set(__self__, \"name\", name) if region and", "what you are doing! *** import warnings import pulumi import pulumi.runtime from typing", "A hash of the contents stored in this object. This field is used", "request body. 
\"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\"", "str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not isinstance(description, str): raise TypeError(\"Expected argument", "pulumi.set(__self__, \"name\", name) if region and not isinstance(region, str): raise TypeError(\"Expected argument 'region'", "return pulumi.get(self, \"self_link\") @property @pulumi.getter(name=\"selfLinkWithId\") def self_link_with_id(self) -> str: \"\"\" Server-defined URL for", "not isinstance(fingerprint, str): raise TypeError(\"Expected argument 'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\",", "= None, region: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNetworkEdgeSecurityServiceResult: \"\"\"", "in the request body. \"\"\" return pulumi.get(self, \"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) ->", "# *** WARNING: this file was generated by the Pulumi SDK Generator. ***", "@property @pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint of this resource. A hash", "you know what you are doing! *** import warnings import pulumi import pulumi.runtime", "as part of the HTTP request URL. It is not settable as a", "str: \"\"\" Creation timestamp in RFC3339 text format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property", "resource is created. 
The name must be 1-63 characters long, and comply with", "opts = pulumi.InvokeOptions() if opts.version is None: opts.version = _utilities.get_version() __ret__ = pulumi.runtime.invoke('google-native:compute/alpha:getNetworkEdgeSecurityService',", "description=self.description, fingerprint=self.fingerprint, kind=self.kind, name=self.name, region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None,", "when inserting a NetworkEdgeSecurityService. An up-to-date fingerprint must be provided in order to", "argument 'fingerprint' to be a str\") pulumi.set(__self__, \"fingerprint\", fingerprint) if kind and not", "isinstance(self_link, str): raise TypeError(\"Expected argument 'self_link' to be a str\") pulumi.set(__self__, \"self_link\", self_link)", "NetworkEdgeSecurityService. An up-to-date fingerprint must be provided in order to update the NetworkEdgeSecurityService,", "-> str: \"\"\" An optional description of this resource. Provide this property when", "long, and comply with RFC1035. Specifically, the name must be 1-63 characters long", "network edge security service associated with this network edge security service. \"\"\" return", "def __init__(__self__, creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp", "Sequence, Union, overload from ... import _utilities __all__ = [ 'GetNetworkEdgeSecurityServiceResult', 'AwaitableGetNetworkEdgeSecurityServiceResult', 'get_network_edge_security_service',", "be provided in order to update the NetworkEdgeSecurityService, otherwise the request will fail", "*** WARNING: this file was generated by the Pulumi SDK Generator. 
*** #", "creation_timestamp=None, description=None, fingerprint=None, kind=None, name=None, region=None, security_policy=None, self_link=None, self_link_with_id=None): if creation_timestamp and not", "with this network edge security service. \"\"\" return pulumi.get(self, \"security_policy\") @property @pulumi.getter(name=\"selfLink\") def", "region=self.region, security_policy=self.security_policy, self_link=self.self_link, self_link_with_id=self.self_link_with_id) def get_network_edge_security_service(network_edge_security_service: Optional[str] = None, project: Optional[str] = None,", "@pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The resource URL for the network edge", "to be a str\") pulumi.set(__self__, \"description\", description) if fingerprint and not isinstance(fingerprint, str):", "__args__['project'] = project __args__['region'] = region if opts is None: opts = pulumi.InvokeOptions()", "be a dash, lowercase letter, or digit, except the last character, which cannot", "NetworkEdgeSecurityService. 
\"\"\" __args__ = dict() __args__['networkEdgeSecurityService'] = network_edge_security_service __args__['project'] = project __args__['region'] =", "description=__ret__.description, fingerprint=__ret__.fingerprint, kind=__ret__.kind, name=__ret__.name, region=__ret__.region, security_policy=__ret__.security_policy, self_link=__ret__.self_link, self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] =", "self_link_with_id=__ret__.self_link_with_id) @_utilities.lift_output_func(get_network_edge_security_service) def get_network_edge_security_service_output(network_edge_security_service: Optional[pulumi.Input[str]] = None, project: Optional[pulumi.Input[Optional[str]]] = None, region: Optional[pulumi.Input[str]]", "import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union, overload", "412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve", "@pulumi.getter def fingerprint(self) -> str: \"\"\" Fingerprint of this resource. A hash of", "except the last character, which cannot be a dash. \"\"\" return pulumi.get(self, \"name\")", "RFC3339 text format. \"\"\" return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) -> str:", "= project __args__['region'] = region if opts is None: opts = pulumi.InvokeOptions() if", "return pulumi.get(self, \"creation_timestamp\") @property @pulumi.getter def description(self) -> str: \"\"\" An optional description", "def name(self) -> str: \"\"\" Name of the resource. Provided by the client", "\"region\") @property @pulumi.getter(name=\"securityPolicy\") def security_policy(self) -> str: \"\"\" The resource URL for the", "argument 'creation_timestamp' to be a str\") pulumi.set(__self__, \"creation_timestamp\", creation_timestamp) if description and not" ]
[]
[]
[ "def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2],", "== Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick ==", "RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\" def test_status_from_decimal_types():", "AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE =", "138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert", "assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert", "LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes", "= status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick,", "isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction,", "isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit,", "statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus)", 
"LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) ==", "def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED", "len(axes) == 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1],", "== LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types():", "axis.joystick == JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit", "= statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str():", "assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis =", "statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert", "import ( AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, )", "assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def", "isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\") == Status.IDLE assert Status.from_flag(\"B\")", "statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert 
isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert", "RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes =", "assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert", "axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def", "Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\" def test_status_from_decimal_types(): axis =", ") RDSTAT_RESPONSE = \":A 10N 138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status,", "axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit", "isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\") ==", "axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert", "axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split():", "assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def", "isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values():", 
"status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE", "axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping", "assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert", "axis = status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor", "test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE)", "assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\")", "10N 138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus)", "isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit,", "MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\" def", "def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor,", "== MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction ==", "status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\" def 
test_status_from_decimal_types(): axis = status_from_decimal(210)", "test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert", "assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert", "JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N", "AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A", "def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes =", "= status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor ==", "assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert", "axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert", "assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3", "== RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes", "== AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping ==", "assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert 
isinstance(axis.ramping_direction, RampingDirection) assert", "= statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0],", "3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert", "Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\") == Status.IDLE assert Status.from_flag(\"B\") ==", "RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis", "== 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status)", "AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING", "from asitiger.status import ( AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal,", "test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus)", "assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status == Status.IDLE assert", "AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\") == Status.IDLE", "AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert 
isinstance(axis.ramping_direction, RampingDirection)", "== JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit ==", "== RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit ==", "axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def", "JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus)", "test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus)", "LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\"", "axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert", "assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\") == Status.IDLE assert Status.from_flag(\"B\") == Status.BUSY", "\":A 10N 138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled,", "= \":A 10N 138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status) assert", "MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus) assert isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus)", "MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert 
axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN", "RDSTAT_RESPONSE = \":A 10N 138\" def test_status_from_decimal_types(): axis = status_from_decimal(210) assert isinstance(axis.status, Status)", "assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert", "== LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert len(axes)", "RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert", "assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status", "( AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE", "isinstance(axis.ramping_direction, RampingDirection) assert isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210)", "axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction", "assert len(axes) == 3 def test_statuses_for_rdstat_types(): axes = statuses_for_rdstat(RDSTAT_RESPONSE) assert isinstance(axes[0], AxisStatus) assert", "RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED", "assert axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert", 
"isinstance(axis.upper_limit, LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status ==", "Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick == JoystickStatus.DISABLED", "isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping,", "asitiger.status import ( AxisEnabledStatus, AxisStatus, JoystickStatus, LimitStatus, MotorStatus, RampingDirection, RampingStatus, Status, status_from_decimal, statuses_for_rdstat,", "LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled ==", "assert axis.upper_limit == LimitStatus.CLOSED assert axis.lower_limit == LimitStatus.CLOSED def test_statuses_for_rdstat_split(): axes = statuses_for_rdstat(RDSTAT_RESPONSE)", "Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus) assert isinstance(axis.ramping, RampingStatus)", "axis.status == Status.IDLE assert axis.enabled == AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert axis.joystick", "assert isinstance(axes[1], Status) assert isinstance(axes[2], AxisStatus) def test_from_flag_str(): assert Status.from_flag(\"N\") == Status.IDLE assert", "LimitStatus) assert isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status == Status.IDLE", "status_from_decimal(210) assert isinstance(axis.status, Status) assert isinstance(axis.enabled, AxisEnabledStatus) assert isinstance(axis.motor, MotorStatus) assert isinstance(axis.joystick, JoystickStatus)", "assert axis.status == Status.IDLE assert axis.enabled == 
AxisEnabledStatus.ENABLED assert axis.motor == MotorStatus.INACTIVE assert", "JoystickStatus.DISABLED assert axis.ramping == RampingStatus.RAMPING assert axis.ramping_direction == RampingDirection.DOWN assert axis.upper_limit == LimitStatus.CLOSED", "isinstance(axis.lower_limit, LimitStatus) def test_status_from_decimal_values(): axis = status_from_decimal(210) assert axis.status == Status.IDLE assert axis.enabled", "RampingStatus, Status, status_from_decimal, statuses_for_rdstat, ) RDSTAT_RESPONSE = \":A 10N 138\" def test_status_from_decimal_types(): axis" ]
[ "RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data):", ".models import Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User", "Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy", "('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta: model = Aktor fields", "instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model =", "'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta: model", "model = User fields = ['id', 'username', 'email','password'] extra_kwargs = {'password': {'required': True,", "'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class", "= ['id','imie','nazwisko','filmy'] # def create(self, validated_data): # filmy = validated_data['filmy'] # del validated_data['filmy']", "{'password': {'required': True, 'write_only': True}} def create(self, validated_data): user = User.objects.create_user(**validated_data) return user", "= {'password': {'required': True, 'write_only': True}} def create(self, validated_data): user = User.objects.create_user(**validated_data) return", "= Aktor fields = ['id','imie','nazwisko','filmy'] # def create(self, 
validated_data): # filmy = validated_data['filmy']", "Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki", "True}} def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta:", "class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta: model = Aktor fields =", "ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ['id',", "fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki =", "= ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki',", "['id','imie','nazwisko','filmy'] # def create(self, validated_data): # filmy = validated_data['filmy'] # del validated_data['filmy'] #", "fields = ['id', 'username', 'email','password'] extra_kwargs = {'password': {'required': True, 'write_only': True}} def", "= User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields =", "# aktor = Aktor.objects.create(**validated_data) # for film in filmy: # f = Film.objects.create(**film)", "validated_data): user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo", "= RecenzjaSerializer(many=True) class Meta: model = Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje']", "= ExtraInfo fields = ['czas_trwania','rodzaj'] class 
RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields", "class Meta: model = User fields = ['id', 'username', 'email','password'] extra_kwargs = {'password':", "model = Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class", "filmy = FilmSerializer(many=True, read_only=True) class Meta: model = Aktor fields = ['id','imie','nazwisko','filmy'] #", "import serializers from .models import Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta:", "validated_data): # filmy = validated_data['filmy'] # del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) #", "aktor = Aktor.objects.create(**validated_data) # for film in filmy: # f = Film.objects.create(**film) #", "Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields =", "filmy = validated_data['filmy'] # del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for film", "'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta:", "= Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis)", "film in filmy: # f = Film.objects.create(**film) # aktor.filmy.add(f) # aktor.save() # return", "# del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for film in filmy: #", "Meta: model = Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje')", "Meta: model = User fields = ['id', 'username', 'email','password'] extra_kwargs 
= {'password': {'required':", "True, 'write_only': True}} def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer):", "# filmy = validated_data['filmy'] # del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for", "= validated_data['filmy'] # del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for film in", "class Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model", "RecenzjaSerializer(many=True) class Meta: model = Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields", "create(self, validated_data): # filmy = validated_data['filmy'] # del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data)", "instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False)", "from django.contrib.auth.models import User from rest_framework import serializers from .models import Film, ExtraInfo,", "rest_framework import serializers from .models import Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class", "Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model =", "validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info =", "instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return 
instance", "class Meta: model = Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis", "from rest_framework import serializers from .models import Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer):", "AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta: model = Aktor fields = ['id','imie','nazwisko','filmy']", "instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer):", "{'required': True, 'write_only': True}} def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class", "model = Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis = validated_data.get('opis',", "validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True)", "fields = ['id','imie','nazwisko','filmy'] # def create(self, validated_data): # filmy = validated_data['filmy'] # del", "recenzje = RecenzjaSerializer(many=True) class Meta: model = Film fields = ['id','tytul', 'opis', 'po_premierze',", "FilmSerializer(many=True, read_only=True) class Meta: model = Aktor fields = ['id','imie','nazwisko','filmy'] # def create(self,", "Aktor.objects.create(**validated_data) # for film in filmy: # f = Film.objects.create(**film) # aktor.filmy.add(f) #", "class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model = Film", "User fields = ['id', 'username', 'email','password'] extra_kwargs = {'password': {'required': True, 
'write_only': True}}", "class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class", "= ['id', 'username', 'email','password'] extra_kwargs = {'password': {'required': True, 'write_only': True}} def create(self,", "class Meta: model = Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields =", "class Meta: model = Aktor fields = ['id','imie','nazwisko','filmy'] # def create(self, validated_data): #", "create(self, validated_data): user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model =", "import User from rest_framework import serializers from .models import Film, ExtraInfo, Recenzja, Aktor", "extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model = Film fields =", "instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta:", "class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance,", "fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy =", "validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for film in filmy: # f =", "User from rest_framework import serializers from .models import Film, ExtraInfo, Recenzja, Aktor class", "= ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields = ('id','opis','gwiazdki','film') def", 
"class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ['id', 'username', 'email','password'] extra_kwargs", "'email','password'] extra_kwargs = {'password': {'required': True, 'write_only': True}} def create(self, validated_data): user =", "def create(self, validated_data): # filmy = validated_data['filmy'] # del validated_data['filmy'] # aktor =", "'write_only': True}} def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class", "serializers from .models import Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model", "Meta: model = Recenzja fields = ('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis =", "['id', 'username', 'email','password'] extra_kwargs = {'password': {'required': True, 'write_only': True}} def create(self, validated_data):", "User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj']", "= validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje =", "= User fields = ['id', 'username', 'email','password'] extra_kwargs = {'password': {'required': True, 'write_only':", "fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields = ('id','opis','gwiazdki','film')", "= Aktor.objects.create(**validated_data) # for film in filmy: # f = Film.objects.create(**film) # aktor.filmy.add(f)", "model = ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja", "Aktor fields = 
['id','imie','nazwisko','filmy'] # def create(self, validated_data): # filmy = validated_data['filmy'] #", "'username', 'email','password'] extra_kwargs = {'password': {'required': True, 'write_only': True}} def create(self, validated_data): user", "validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class", "ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model = Film fields = ['id','tytul', 'opis',", "['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields = ('id','opis','gwiazdki','film') def create(self,", "user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer):", "= Film fields = ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer):", "def create(self, validated_data): user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model", "for film in filmy: # f = Film.objects.create(**film) # aktor.filmy.add(f) # aktor.save() #", "model = Aktor fields = ['id','imie','nazwisko','filmy'] # def create(self, validated_data): # filmy =", "ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta:", "from .models import Film, ExtraInfo, Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model =", "import Film, ExtraInfo, Recenzja, Aktor class 
UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields", "read_only=True) class Meta: model = Aktor fields = ['id','imie','nazwisko','filmy'] # def create(self, validated_data):", "# for film in filmy: # f = Film.objects.create(**film) # aktor.filmy.add(f) # aktor.save()", "['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True)", "in filmy: # f = Film.objects.create(**film) # aktor.filmy.add(f) # aktor.save() # return aktor", "return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields = ['czas_trwania','rodzaj'] class", "def create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save()", "UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ['id', 'username', 'email','password'] extra_kwargs =", "('id','opis','gwiazdki','film') def create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki)", "= FilmSerializer(many=True, read_only=True) class Meta: model = Aktor fields = ['id','imie','nazwisko','filmy'] # def", "del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for film in filmy: # f", "# def create(self, validated_data): # filmy = validated_data['filmy'] # del validated_data['filmy'] # aktor", "ExtraInfo fields = ['czas_trwania','rodzaj'] class RecenzjaSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Recenzja fields =", "create(self, instance, validated_data): instance.opis = validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', 
instance.gwiazdki) instance.save() return", "Recenzja, Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ['id', 'username',", "validated_data['filmy'] # del validated_data['filmy'] # aktor = Aktor.objects.create(**validated_data) # for film in filmy:", "Meta: model = Aktor fields = ['id','imie','nazwisko','filmy'] # def create(self, validated_data): # filmy", "= ['id','tytul', 'opis', 'po_premierze', 'premiera','rok','imdb_rating','extra_info','recenzje'] read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True,", "return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model", "= ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta: model = Aktor", "Aktor class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ['id', 'username', 'email','password']", "= validated_data.get('opis', instance.opis) instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info", "FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model = Film fields", "extra_kwargs = {'password': {'required': True, 'write_only': True}} def create(self, validated_data): user = User.objects.create_user(**validated_data)", "django.contrib.auth.models import User from rest_framework import serializers from .models import Film, ExtraInfo, Recenzja,", "instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) 
recenzje = RecenzjaSerializer(many=True) class", "= ExtraInfoSerializer(many=False) recenzje = RecenzjaSerializer(many=True) class Meta: model = Film fields = ['id','tytul',", "instance.gwiazdki = validated_data.get('gwiazki', instance.gwiazdki) instance.save() return instance class FilmSerializer(serializers.HyperlinkedModelSerializer): extra_info = ExtraInfoSerializer(many=False) recenzje", "user = User.objects.create_user(**validated_data) return user class ExtraInfoSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ExtraInfo fields", "read_only_fields = ('extra_info','recenzje') class AktorSerializer(serializers.HyperlinkedModelSerializer): filmy = FilmSerializer(many=True, read_only=True) class Meta: model =" ]
[ "from __future__ import (division, absolute_import, unicode_literals, print_function) import os from file_metadata.image.svg_file import SVGFile", "self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4))", "from file_metadata.image.svg_file import SVGFile from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self):", "name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape,", "self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self):", "def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with", "import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1)", "uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100,", "(division, absolute_import, unicode_literals, print_function) import os from file_metadata.image.svg_file import SVGFile from tests import", "SVGFile(fetch_file('text_html.svg')) as uut: 
self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut:", "with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as", "SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut:", "def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with", "(260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3))", "445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self):", "SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape,", "fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name", "coding: utf-8 -*- from __future__ import (division, absolute_import, unicode_literals, print_function) import os from", "-*- from __future__ import (division, absolute_import, unicode_literals, print_function) import os from file_metadata.image.svg_file import", "__future__ import (division, absolute_import, unicode_literals, print_function) import os 
from file_metadata.image.svg_file import SVGFile from", "4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self):", "test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg'))", "as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape,", "def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with", "SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with", "uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200,", "file_metadata.image.svg_file import SVGFile from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut", "SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name))", "300, 3)) def test_file_format(self): with 
SVGFile(fetch_file('text_plain.svg')) as uut: data = uut.analyze_file_format() self.assertIn('Composite:FileFormat', data)", "test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as", "class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0]", "def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut: data = uut.analyze_file_format() self.assertIn('Composite:FileFormat', data) self.assertEqual(data['Composite:FileFormat'], 'svg')", "self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300,", "self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut: data = uut.analyze_file_format()", "unicode_literals, print_function) import os from file_metadata.image.svg_file import SVGFile from tests import fetch_file, unittest", "self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4))", "os from file_metadata.image.svg_file import SVGFile from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def", "200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def", "(300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut: data = 
uut.analyze_file_format() self.assertIn('Composite:FileFormat',", "SVGFile from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg'))", "uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300,", "def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close()", "test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name))", "uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def", "(100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def", "= tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369,", "with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with 
SVGFile(fetch_file('text_plain.svg')) as", "as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut: data", "def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg'))", "-*- coding: utf-8 -*- from __future__ import (division, absolute_import, unicode_literals, print_function) import os", "unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name =", "(369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def", "self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100))", "self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as", "as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape,", "absolute_import, unicode_literals, print_function) import os from file_metadata.image.svg_file import SVGFile from tests import fetch_file,", "1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with 
SVGFile(fetch_file('application_xml.svg')) as uut:", "4)) def test_fetch_svg_ndarray(self): with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with", "100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self):", "import (division, absolute_import, unicode_literals, print_function) import os from file_metadata.image.svg_file import SVGFile from tests", "= SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self):", "<gh_stars>1-10 # -*- coding: utf-8 -*- from __future__ import (division, absolute_import, unicode_literals, print_function)", "test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260, 200, 4)) def test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg'))", "# -*- coding: utf-8 -*- from __future__ import (division, absolute_import, unicode_literals, print_function) import", "SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut:", "import os from file_metadata.image.svg_file import SVGFile from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase):", "tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png')) 
self.assertEqual(len(uut.temp_filenames),", "self.assertTrue(uut.fetch('filename_raster').endswith('.png')) self.assertEqual(len(uut.temp_filenames), 1) name = tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg'))", "uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445, 4)) def", "3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut: data = uut.analyze_file_format() self.assertIn('Composite:FileFormat', data) self.assertEqual(data['Composite:FileFormat'],", "with SVGFile(fetch_file('image_svg_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut:", "tuple(uut.temp_filenames)[0] self.assertTrue(os.path.exists(name)) uut.close() self.assertFalse(os.path.exists(name)) def test_fetch_svg_ndarray_application_xml(self): with SVGFile(fetch_file('application_xml.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (369, 445,", "print_function) import os from file_metadata.image.svg_file import SVGFile from tests import fetch_file, unittest class", "as uut: self.assertEqual(uut.fetch('ndarray').shape, (100, 100)) def test_fetch_svg_ndarray_text_html(self): with SVGFile(fetch_file('text_html.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (260,", "import SVGFile from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut =", "utf-8 -*- from __future__ import (division, absolute_import, unicode_literals, print_function) import os from file_metadata.image.svg_file", "with SVGFile(fetch_file('text_plain.svg')) as uut: 
self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as", "from tests import fetch_file, unittest class SVGFileTest(unittest.TestCase): def test_svg_fetch_filename_raster(self): uut = SVGFile(fetch_file('image_svg_xml.svg')) self.assertTrue(uut.fetch('filename_raster').endswith('.png'))", "uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg')) as uut: data =", "test_fetch_svg_ndarray_text_plain(self): with SVGFile(fetch_file('text_plain.svg')) as uut: self.assertEqual(uut.fetch('ndarray').shape, (300, 300, 3)) def test_file_format(self): with SVGFile(fetch_file('text_plain.svg'))" ]
[ "xr.DataArray: \"\"\" Return the smooth kernel used in the first two pcs \"\"\"", "smooth_kernel(self, smooth_kernel: List) -> None: \"\"\" Set a new smooth kernel to be", "2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs =", "used in the computation of the E and C index \"\"\" return self._corrected_pcs()", "xr.DataArray: \"\"\" Computes the mean from the selected El Niño zone, also know", "Return the first two principal components rotated, also known as the E and", "-> xr.Dataset: \"\"\" Compute the E and C index \"\"\" _pcs = self._corrected_pcs()", "def pcs_smooth(self) -> xr.DataArray: \"\"\" Return the first two principal components smoothed with", "xr.Dataset: \"\"\" Return the first two principal components rotated, also known as the", "None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly: self.sst_data =", "known as the E and C index \"\"\" return self._compute_index() @property def ecindex_smooth(self)", "sign of known events for the E and C index. 
\"\"\" _eofs =", "= compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None: self._auto_corr_factor() else:", "numpy as np import xarray as xr from eofs.xarray import Eof from .core", "self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str = \"34\") -> xr.DataArray: \"\"\" Computes the", "the first two principal components rotated, also known as the E and C", "@property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth kernel used in the", "as the E and C index \"\"\" return self._compute_index() @property def ecindex_smooth(self) ->", "methods to compute a variety of indices used to study ENSO \"\"\" from", "according to Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly: bool = False,", "\"\"\" Set a new correction factor to be applied to the first two", "_eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0]", "new_corr_factor def _compute_index(self, smooth: bool = False) -> xr.Dataset: \"\"\" Compute the E", "Return the smooth kernel used in the first two pcs \"\"\" return self._smooth_kernel", "two principal components smoothed and rotated, also known as the E and C", "Return the pcs with the correction factor applied \"\"\" return self.anom_pcs * self.corr_factor", "a new smooth kernel to be applied to the first two pcs \"\"\"", "the E and C index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray:", "study ENSO \"\"\" from typing import List, Optional, Tuple import numpy as np", "kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\" Return the first two principal", "\"\"\" Compute the E and C index \"\"\" _pcs = self._corrected_pcs() if smooth", "= compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, 
self.climatology)", "first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None:", "known events for the E and C index. \"\"\" _eofs = self.solver.eofs(neofs=2) _subset", "smooth: bool = False) -> xr.Dataset: \"\"\" Compute the E and C index", "= np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1]", "/ clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return the", "\"\"\" _pcs = self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\")", "dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\" Return the first two principal components", "import xarray as xr from eofs.xarray import Eof from .core import compute_anomaly, compute_climatology,", "ENSO \"\"\" from typing import List, Optional, Tuple import numpy as np import", "selected El Niño zone, also know as El Niño Index for each of", "factor applied to the first two pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self,", "the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts =", "the first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) ->", "@property def pcs(self) -> xr.DataArray: \"\"\" Return the first two principal components used", "Return the correction factor applied to the first two pcs \"\"\" return self._corr_factor", "eindex = (pc1 - pc2) / (2 ** (1 / 2)) eindex.name =", "def ecindex(self) -> xr.Dataset: \"\"\" Return the first two principal components rotated, also", "** (1 / 2)) eindex.name = \"E_index\" cindex = (pc1 + pc2) /", "slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\": slice(160, 210)}, }", "self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) 
@property def smooth_kernel(self) -> xr.DataArray:", "correction factor by estimating the sign of known events for the E and", "in the first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List)", "0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\":", "pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property def smooth_kernel(self)", "also known as the E and C index \"\"\" return self._compute_index(smooth=True) def enzones(data:", "= ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs = None def", "-1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor =", "@property def corr_factor(self) -> xr.DataArray: \"\"\" Return the correction factor applied to the", "2, 1], ): self.sst_data = sst_data self.base_period = base_period if climatology is None:", "xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 -", "used in the first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel:", "of known events for the E and C index. 
\"\"\" _eofs = self.solver.eofs(neofs=2)", "-> xr.DataArray: \"\"\" Computes the mean from the selected El Niño zone, also", "variety of indices used to study ENSO \"\"\" from typing import List, Optional,", "slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\":", "def _compute_pcs(self) -> None: \"\"\" Compute the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10,", "base_period if climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if", "(\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2, 1], ):", "self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0)", "np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] =", ".core import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes the E and C", "lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0", "return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first two principal", "self.smooth_kernel = smooth_kernel if corr_factor is None: self._auto_corr_factor() else: self.corr_factor = corr_factor def", "Compute the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts", "* self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically determine the correction factor by", "new correction factor to be applied to the first two pcs \"\"\" self._corr_factor", "two pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None: \"\"\"", "and C index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, 
zone: str = \"34\")", "slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\": slice(160, 210)}, } return data.sel(**zones[zone]).mean(dim=[\"lat\", \"lon\"])", "Eof from .core import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes the E", "and rotated, also known as the E and C index \"\"\" return self._compute_index(smooth=True)", "factor applied \"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically", "-> xr.DataArray: \"\"\" Return the pcs with the correction factor applied \"\"\" return", "return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: \"\"\" Return the first two principal", "\"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5,", "= None, smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data = sst_data self.base_period", "from eofs.xarray import Eof from .core import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\"", "+ pc2) / (2 ** (1 / 2)) cindex.name = \"C_index\" return xr.merge([eindex,", "dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2) /", "= climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel", "bool = False) -> xr.Dataset: \"\"\" Compute the E and C index \"\"\"", "the correction factor applied \"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None:", "smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data = sst_data self.base_period = base_period", "the pcs with the correction factor applied \"\"\" return self.anom_pcs * self.corr_factor def", "_eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() <", "components smoothed and rotated, also known as the E and C index \"\"\"", "Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = 
self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = ( self.solver.projectField(", "(2 ** (1 / 2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property def", "self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool = False) -> xr.Dataset: \"\"\" Compute", "-> xr.Dataset: \"\"\" Return the first two principal components rotated, also known as", "/ (2 ** (1 / 2)) eindex.name = \"E_index\" cindex = (pc1 +", "= 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor def", "True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex", "_pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2) / (2 ** (1", "eindex.name = \"E_index\" cindex = (pc1 + pc2) / (2 ** (1 /", "self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return the pcs with the", "= \"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\" Return the", "compute a variety of indices used to study ENSO \"\"\" from typing import", "xr.DataArray: \"\"\" Return the first two principal components used in the computation of", "zones = { \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5,", "self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically determine the correction factor", "import Eof from .core import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes the", "\"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically determine the", "determine the correction factor by estimating the sign of known events for the", "{\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\": slice(160, 210)},", "slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5),", "def __init__( self, sst_data: 
xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] = None,", "of indices used to study ENSO \"\"\" from typing import List, Optional, Tuple", "np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) **", "smoothed with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve(", "mean from the selected El Niño zone, also know as El Niño Index", "ECindex: \"\"\" Computes the E and C index according to Takahashi \"\"\" def", "Module containing the definitions and methods to compute a variety of indices used", "xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\" Return the correction factor applied", "compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes the E and C index according", "to be applied to the first two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor),", "the selected El Niño zone, also know as El Niño Index for each", "\"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data", "new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1", "and methods to compute a variety of indices used to study ENSO \"\"\"", "events for the E and C index. 
\"\"\" _eofs = self.solver.eofs(neofs=2) _subset =", "the definitions and methods to compute a variety of indices used to study", "typing import List, Optional, Tuple import numpy as np import xarray as xr", "np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\"", "Set a new smooth kernel to be applied to the first two pcs", "_compute_index(self, smooth: bool = False) -> xr.Dataset: \"\"\" Compute the E and C", "base_period) self.climatology = climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel", "None: \"\"\" Set a new correction factor to be applied to the first", "known as the E and C index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray,", "as np import xarray as xr from eofs.xarray import Eof from .core import", "= False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor:", "** (1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std", "be applied to the first two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\",", "the sign of known events for the E and C index. 
\"\"\" _eofs", "= corr_factor def _compute_pcs(self) -> None: \"\"\" Compute the principal components \"\"\" _subset", "\"E_index\" cindex = (pc1 + pc2) / (2 ** (1 / 2)) cindex.name", "the E and C index according to Takahashi \"\"\" def __init__( self, sst_data:", "= self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, )", "List, Optional, Tuple import numpy as np import xarray as xr from eofs.xarray", "np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the", "( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self)", "\"34\") -> xr.DataArray: \"\"\" Computes the mean from the selected El Niño zone,", "@smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None: \"\"\" Set a new smooth kernel", "List[int]) -> None: \"\"\" Set a new correction factor to be applied to", "self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None: self._auto_corr_factor()", "pcs with the correction factor applied \"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self)", "Set a new correction factor to be applied to the first two pcs", "self.climatology = climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel =", "weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"),", "E and C index according to Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray,", "E and C index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: \"\"\"", "self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first two principal components", "\"\"\" _subset = 
self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver", "\"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210,", "else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor", "\"\"\" Module containing the definitions and methods to compute a variety of indices", "is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1)", "pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None: \"\"\" Set", "and C index \"\"\" return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return", "neofs=2, ) / clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: \"\"\"", "index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str = \"34\") -> xr.DataArray:", "(2 ** (1 / 2)) eindex.name = \"E_index\" cindex = (pc1 + pc2)", "= (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2, 1],", "self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None: self._auto_corr_factor() else: self.corr_factor = corr_factor", "climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if", "None: \"\"\" Set a new smooth kernel to be applied to the first", "= dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0,", "the first two principal components smoothed and rotated, also known as the E", "cindex = (pc1 + pc2) / (2 ** (1 / 2)) cindex.name =", "\"\"\" Return the pcs with the correction factor applied \"\"\" return self.anom_pcs *", "pcs_smooth(self) -> xr.DataArray: \"\"\" Return the 
first two principal components smoothed with the", "self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts)", "first two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"])", "{ \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\":", "-> None: \"\"\" Set a new correction factor to be applied to the", "return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str = \"34\") -> xr.DataArray: \"\"\" Computes", "else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool = False) -> xr.Dataset:", "False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]]", "-> xr.DataArray: \"\"\" Return the correction factor applied to the first two pcs", "pcs(self) -> xr.DataArray: \"\"\" Return the first two principal components used in the", "self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property", "index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: \"\"\" Return the first", "components smoothed with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs =", "5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\":", "the first two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], )", "1])], ) @property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth kernel used", "= xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1", "Niño zone, also 
know as El Niño Index for each of the zones.", "pc2) / (2 ** (1 / 2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex])", "each of the zones. \"\"\" zones = { \"12\": {\"lat\": slice(-10, 0), \"lon\":", "dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean()", "if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean()", "the correction factor applied to the first two pcs \"\"\" return self._corr_factor @corr_factor.setter", "know as El Niño Index for each of the zones. \"\"\" zones =", "np import xarray as xr from eofs.xarray import Eof from .core import compute_anomaly,", "isanomaly: bool = False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] = (\"1979-01-01\",", "List) -> None: \"\"\" Set a new smooth kernel to be applied to", "self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\" Return", "sst_data: xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str]", "and C index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: \"\"\" Return", "{\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)},", "250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else", "slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\":", "zone: str = \"34\") -> xr.DataArray: \"\"\" Computes the mean from the selected", "for each of the zones. 
\"\"\" zones = { \"12\": {\"lat\": slice(-10, 0),", "-> xr.Dataset: \"\"\" Return the first two principal components smoothed and rotated, also", "return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically determine the correction", "self.corr_factor = corr_factor def _compute_pcs(self) -> None: \"\"\" Compute the principal components \"\"\"", "indices used to study ENSO \"\"\" from typing import List, Optional, Tuple import", "E and C index \"\"\" _pcs = self._corrected_pcs() if smooth is True: _pcs", "return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\" Return the correction factor", "= 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] = 1 if", "def smooth_kernel(self, smooth_kernel: List) -> None: \"\"\" Set a new smooth kernel to", "components used in the computation of the E and C index \"\"\" return", "= np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray:", "\"\"\" Automatically determine the correction factor by estimating the sign of known events", "ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first two principal components smoothed and rotated,", "= base_period if climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology", "for the E and C index. 
\"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2,", "1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1,", "self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) ->", "None: \"\"\" Compute the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat =", "applied to the first two pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor:", "Niño Index for each of the zones. \"\"\" zones = { \"12\": {\"lat\":", "List[int] = [1, 2, 1], ): self.sst_data = sst_data self.base_period = base_period if", "xarray as xr from eofs.xarray import Eof from .core import compute_anomaly, compute_climatology, xconvolve", "_subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if", "the first two principal components smoothed with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs", "from the selected El Niño zone, also know as El Niño Index for", "two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property def", "return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\" Return the first two principal", "-> xr.DataArray: \"\"\" Return the first two principal components used in the computation", "= _pcs.sel(mode=1) eindex = (pc1 - pc2) / (2 ** (1 / 2))", "self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\" Return", "(pc1 - pc2) / (2 ** (1 / 2)) eindex.name = \"E_index\" cindex", "= np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2)", "self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: 
List) -> None: \"\"\" Set a new smooth", "and C index \"\"\" _pcs = self._corrected_pcs() if smooth is True: _pcs =", "enzones(data: xr.DataArray, zone: str = \"34\") -> xr.DataArray: \"\"\" Computes the mean from", "kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self) ->", "\"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\": slice(160, 210)}, } return data.sel(**zones[zone]).mean(dim=[\"lat\",", "= None def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return the pcs with the correction", "principal components smoothed and rotated, also known as the E and C index", "smooth_kernel if corr_factor is None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) ->", "@property def ecindex(self) -> xr.Dataset: \"\"\" Return the first two principal components rotated,", "corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data =", "from typing import List, Optional, Tuple import numpy as np import xarray as", "new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() > 0 else -1 new_corr_factor[1] = 1", "\"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None: \"\"\" Set a", "= \"34\") -> xr.DataArray: \"\"\" Computes the mean from the selected El Niño", "C index \"\"\" return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the", "smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 =", "also know as El Niño Index for each of the zones. 
\"\"\" zones", "self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2)", "[1, 2, 1], ): self.sst_data = sst_data self.base_period = base_period if climatology is", "to the first two pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int])", "self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs =", "/ kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\" Return the first two", "\"\"\" Set a new smooth kernel to be applied to the first two", "self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1", "None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self)", "_pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex =", "self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) -> None: \"\"\" Compute the principal", "applied to the first two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel", "index. 
\"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor =", "class ECindex: \"\"\" Computes the E and C index according to Takahashi \"\"\"", "the first two pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) ->", "xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\"", "xr.DataArray: \"\"\" Return the correction factor applied to the first two pcs \"\"\"", "definitions and methods to compute a variety of indices used to study ENSO", "_auto_corr_factor(self) -> None: \"\"\" Automatically determine the correction factor by estimating the sign", "slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5),", "cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\" Return the correction factor applied to", "= xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\" Return the", "to compute a variety of indices used to study ENSO \"\"\" from typing", "= \"E_index\" cindex = (pc1 + pc2) / (2 ** (1 / 2))", "Computes the E and C index according to Takahashi \"\"\" def __init__( self,", "if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor", "self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None: \"\"\" Set a new correction", "xr from eofs.xarray import Eof from .core import compute_anomaly, compute_climatology, xconvolve class ECindex:", "_subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver =", "El Niño zone, also know as El Niño Index for each of the", "smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth kernel used in the 
first two", "sst_data self.base_period = base_period if climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology", "0 else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool = False) ->", "None def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return the pcs with the correction factor", "def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth kernel used in the first", "with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(),", "dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\" Return the first", "also known as the E and C index \"\"\" return self._compute_index() @property def", "C index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str = \"34\") ->", "zones. \"\"\" zones = { \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\":", "else: self.corr_factor = corr_factor def _compute_pcs(self) -> None: \"\"\" Compute the principal components", "None: \"\"\" Automatically determine the correction factor by estimating the sign of known", "= sst_data self.base_period = base_period if climatology is None: climatology = compute_climatology(self.sst_data, base_period)", "to be applied to the first two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel", "first two principal components smoothed with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is", "xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self) -> xr.DataArray: \"\"\" Return the first", "\"\"\" return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: \"\"\" Return the first two", "\"\"\" Compute the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data))", "isanomaly: self.sst_data = compute_anomaly(self.sst_data, 
self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None:", "def enzones(data: xr.DataArray, zone: str = \"34\") -> xr.DataArray: \"\"\" Computes the mean", "= (pc1 + pc2) / (2 ** (1 / 2)) cindex.name = \"C_index\"", "two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None: \"\"\"", "self.base_period = base_period if climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology =", "\"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190,", "the first two principal components used in the computation of the E and", "= Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = (", "the computation of the E and C index \"\"\" return self._corrected_pcs() @property def", "(1 / 2)) eindex.name = \"E_index\" cindex = (pc1 + pc2) / (2", "@property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first two principal components smoothed", "bool = False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"),", ") return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\" Return the first two", "Optional[xr.DataArray] = None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None,", "/ 2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray:", "_compute_pcs(self) -> None: \"\"\" Compute the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10))", ") / clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return", "return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self, smooth_kernel: List) -> None: \"\"\" Set a new", "index \"\"\" _pcs = 
self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel,", "principal components smoothed with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs", "= smooth_kernel if corr_factor is None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self)", "corr_factor: List[int]) -> None: \"\"\" Set a new correction factor to be applied", "first two pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None:", "Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1,", "import numpy as np import xarray as xr from eofs.xarray import Eof from", "def corr_factor(self) -> xr.DataArray: \"\"\" Return the correction factor applied to the first", "\"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5,", "is None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) -> None: \"\"\" Compute", "the E and C index. 
\"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2),", "import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes the E and C index", "to the first two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel /", "Automatically determine the correction factor by estimating the sign of known events for", "with the correction factor applied \"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) ->", "-> None: \"\"\" Set a new smooth kernel to be applied to the", "principal components used in the computation of the E and C index \"\"\"", "[0, 1])], ) @property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth kernel", "if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs", "smoothed and rotated, also known as the E and C index \"\"\" return", "-> xr.DataArray: \"\"\" Return the smooth kernel used in the first two pcs", "self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\" Return the first two principal components", "(pc1 + pc2) / (2 ** (1 / 2)) cindex.name = \"C_index\" return", "the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel,", "smooth_kernel: List) -> None: \"\"\" Set a new smooth kernel to be applied", "self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset: \"\"\" Return the", "np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs", "None, smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data = sst_data self.base_period =", "pcs \"\"\" return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None: \"\"\" Set", "self._corrected_pcs() @property def 
pcs_smooth(self) -> xr.DataArray: \"\"\" Return the first two principal components", "coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std =", "xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] =", "two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property", "not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is", "new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor", "_eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool", "Tuple import numpy as np import xarray as xr from eofs.xarray import Eof", "compute_climatology, xconvolve class ECindex: \"\"\" Computes the E and C index according to", "climatology: Optional[xr.DataArray] = None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] =", "estimating the sign of known events for the E and C index. \"\"\"", "270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\":", "two principal components used in the computation of the E and C index", "as the E and C index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone:", "climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly:", "Optional, Tuple import numpy as np import xarray as xr from eofs.xarray import", "a variety of indices used to study ENSO \"\"\" from typing import List,", "of the zones. 
\"\"\" zones = { \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270,", "2)) eindex.name = \"E_index\" cindex = (pc1 + pc2) / (2 ** (1", "\"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] =", "def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return the pcs with the correction factor applied", "xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return", "= self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 =", "1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor def _compute_index(self,", "the E and C index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str", "**_subset).mean() > 0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0", "two principal components rotated, also known as the E and C index \"\"\"", "\"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str = \"34\") -> xr.DataArray: \"\"\"", "0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else -1", "Return the first two principal components used in the computation of the E", "None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) -> None: \"\"\" Compute the", "< 0 else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool = False)", "= xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self) -> xr.Dataset:", "cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\" Return", "\"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2)", ") self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: \"\"\" 
Return the pcs with", "xr.DataArray, zone: str = \"34\") -> xr.DataArray: \"\"\" Computes the mean from the", "compute_anomaly(self.sst_data, self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None: self._auto_corr_factor() else: self.corr_factor", "applied \"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically determine", "rotated, also known as the E and C index \"\"\" return self._compute_index() @property", "/ (2 ** (1 / 2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property", "of the E and C index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self) ->", "\"\"\" Return the first two principal components used in the computation of the", "> 0 else -1 new_corr_factor[1] = 1 if _eofs.sel(mode=1, **_subset).mean() < 0 else", "\"\"\" from typing import List, Optional, Tuple import numpy as np import xarray", "if corr_factor is None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) -> None:", "\"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def pcs(self)", "clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2,", "None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int]", "to the first two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])],", "pc1 = _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2) / (2", "E and C index. 
\"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210,", "\"\"\" Return the smooth kernel used in the first two pcs \"\"\" return", "-> None: \"\"\" Automatically determine the correction factor by estimating the sign of", "\"\"\" Return the first two principal components smoothed and rotated, also known as", "Return the first two principal components smoothed and rotated, also known as the", "correction factor applied to the first two pcs \"\"\" return self._corr_factor @corr_factor.setter def", "Return the first two principal components smoothed with the specified smooth_kernel \"\"\" if", "applied to the first two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0,", "def pcs(self) -> xr.DataArray: \"\"\" Return the first two principal components used in", "to study ENSO \"\"\" from typing import List, Optional, Tuple import numpy as", "correction factor to be applied to the first two pcs \"\"\" self._corr_factor =", "climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data,", "first two pcs \"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property", "is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def", "def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first two principal components smoothed and", "): self.sst_data = sst_data self.base_period = base_period if climatology is None: climatology =", "eofs.xarray import Eof from .core import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes", "index according to Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly: bool =", "corr_factor(self) -> xr.DataArray: \"\"\" Return the correction factor applied to the first two", "str = \"34\") -> xr.DataArray: 
\"\"\" Computes the mean from the selected El", "containing the definitions and methods to compute a variety of indices used to", "pc2) / (2 ** (1 / 2)) eindex.name = \"E_index\" cindex = (pc1", "be applied to the first two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel =", "correction factor applied \"\"\" return self.anom_pcs * self.corr_factor def _auto_corr_factor(self) -> None: \"\"\"", "_corrected_pcs(self) -> xr.DataArray: \"\"\" Return the pcs with the correction factor applied \"\"\"", "def _auto_corr_factor(self) -> None: \"\"\" Automatically determine the correction factor by estimating the", "\"34\": {\"lat\": slice(-5, 5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\": slice(160,", "= [1, 2, 1], ): self.sst_data = sst_data self.base_period = base_period if climatology", "E and C index \"\"\" return self._compute_index(smooth=True) def enzones(data: xr.DataArray, zone: str =", "self.sst_data = sst_data self.base_period = base_period if climatology is None: climatology = compute_climatology(self.sst_data,", "first two principal components rotated, also known as the E and C index", "\"\"\" zones = { \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\":", "self, sst_data: xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] = None, base_period: Tuple[str,", "= (pc1 - pc2) / (2 ** (1 / 2)) eindex.name = \"E_index\"", "- pc2) / (2 ** (1 / 2)) eindex.name = \"E_index\" cindex =", "kernel used in the first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def smooth_kernel(self,", "5), \"lon\": slice(190, 240)}, \"4\": {\"lat\": slice(-5, 5), \"lon\": slice(160, 210)}, } return", "factor to be applied to the first two pcs \"\"\" self._corr_factor = xr.DataArray(", "Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray]", "@corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None: 
\"\"\" Set a new correction factor", "self.corr_factor def _auto_corr_factor(self) -> None: \"\"\" Automatically determine the correction factor by estimating", "False) -> xr.Dataset: \"\"\" Compute the E and C index \"\"\" _pcs =", "principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[...,", "the mean from the selected El Niño zone, also know as El Niño", "\"\"\" Computes the E and C index according to Takahashi \"\"\" def __init__(", "wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1", "2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] = 1 if _eofs.sel(mode=0, **_subset).mean() >", "280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)}, \"34\": {\"lat\": slice(-5, 5), \"lon\":", "**_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool =", "if _eofs.sel(mode=1, **_subset).mean() < 0 else -1 self.corr_factor = new_corr_factor def _compute_index(self, smooth:", "C index \"\"\" _pcs = self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs,", "corr_factor def _compute_pcs(self) -> None: \"\"\" Compute the principal components \"\"\" _subset =", "corr_factor(self, corr_factor: List[int]) -> None: \"\"\" Set a new correction factor to be", "principal components rotated, also known as the E and C index \"\"\" return", "is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly: self.sst_data", "factor by estimating the sign of known events for the E and C", "zone, also know as El Niño Index for each of the zones. 
\"\"\"", "coords=[(\"mode\", [0, 1])], ) @property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth", "= self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor = np.zeros(2) new_corr_factor[0] =", "self.solver.eigenvalues(neigs=2) ** (1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) /", "as xr from eofs.xarray import Eof from .core import compute_anomaly, compute_climatology, xconvolve class", "self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs = None", "{\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5), \"lon\": slice(210, 270)},", "\"\"\" self._corr_factor = xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property def smooth_kernel(self) ->", "(1 / 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std )", "xr.Dataset: \"\"\" Return the first two principal components smoothed and rotated, also known", "specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\",", "computation of the E and C index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self)", "\"\"\" Return the first two principal components rotated, also known as the E", "-1 self.corr_factor = new_corr_factor def _compute_index(self, smooth: bool = False) -> xr.Dataset: \"\"\"", "** (1 / 2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self)", "np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 / 2)", "in the computation of the E and C index \"\"\" return self._corrected_pcs() @property", "self.climatology) self._compute_pcs() self.smooth_kernel = smooth_kernel if corr_factor is None: 
self._auto_corr_factor() else: self.corr_factor =", "the smooth kernel used in the first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter", "pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(), dims=[\"time\"]) @property def", "-> None: \"\"\" Compute the principal components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat", "the E and C index \"\"\" return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset:", "def corr_factor(self, corr_factor: List[int]) -> None: \"\"\" Set a new correction factor to", "first two principal components smoothed and rotated, also known as the E and", "C index according to Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly: bool", "compute_climatology(self.sst_data, base_period) self.climatology = climatology if not isanomaly: self.sst_data = compute_anomaly(self.sst_data, self.climatology) self._compute_pcs()", "\"\"\" Return the correction factor applied to the first two pcs \"\"\" return", "components rotated, also known as the E and C index \"\"\" return self._compute_index()", "the E and C index \"\"\" _pcs = self._corrected_pcs() if smooth is True:", "return self._corr_factor @corr_factor.setter def corr_factor(self, corr_factor: List[int]) -> None: \"\"\" Set a new", "10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std", "/ 2) self.anom_pcs = ( self.solver.projectField( _subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs", "new smooth kernel to be applied to the first two pcs \"\"\" kernel", "\"\"\" Computes the mean from the selected El Niño zone, also know as", "used to study ENSO \"\"\" from typing import List, Optional, Tuple import numpy", "-> xr.DataArray: \"\"\" Return the first two principal components smoothed with the specified", "corr_factor is 
None: self._auto_corr_factor() else: self.corr_factor = corr_factor def _compute_pcs(self) -> None: \"\"\"", "smooth_kernel \"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", )", "Compute the E and C index \"\"\" _pcs = self._corrected_pcs() if smooth is", "C index \"\"\" return self._corrected_pcs() @property def pcs_smooth(self) -> xr.DataArray: \"\"\" Return the", "a new correction factor to be applied to the first two pcs \"\"\"", "= False) -> xr.Dataset: \"\"\" Compute the E and C index \"\"\" _pcs", "Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2, 1], ): self.sst_data = sst_data", "xr.Dataset: \"\"\" Compute the E and C index \"\"\" _pcs = self._corrected_pcs() if", "2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\"", "xr.DataArray: \"\"\" Return the pcs with the correction factor applied \"\"\" return self.anom_pcs", "Index for each of the zones. \"\"\" zones = { \"12\": {\"lat\": slice(-10,", "smooth kernel to be applied to the first two pcs \"\"\" kernel =", "kernel to be applied to the first two pcs \"\"\" kernel = np.array(smooth_kernel)", "\"\"\" return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first two", "__init__( self, sst_data: xr.DataArray, isanomaly: bool = False, climatology: Optional[xr.DataArray] = None, base_period:", "\"\"\" Return the first two principal components smoothed with the specified smooth_kernel \"\"\"", "_subset.drop(\"month\"), neofs=2, ) / clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray:", "and C index. 
\"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250))", "def _compute_index(self, smooth: bool = False) -> xr.Dataset: \"\"\" Compute the E and", "Computes the mean from the selected El Niño zone, also know as El", "= xr.DataArray( np.array(corr_factor), coords=[(\"mode\", [0, 1])], ) @property def smooth_kernel(self) -> xr.DataArray: \"\"\"", "E and C index \"\"\" return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\"", "C index. \"\"\" _eofs = self.solver.eofs(neofs=2) _subset = dict(lat=slice(-2, 2), lon=slice(210, 250)) new_corr_factor", "/ 2)) eindex.name = \"E_index\" cindex = (pc1 + pc2) / (2 **", "two principal components smoothed with the specified smooth_kernel \"\"\" if self.anom_smooth_pcs is None:", "= np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period)), weights=wgts) clim_std = self.solver.eigenvalues(neigs=2) ** (1 /", "_pcs = self._corrected_pcs() if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1", "rotated, also known as the E and C index \"\"\" return self._compute_index(smooth=True) def", "from .core import compute_anomaly, compute_climatology, xconvolve class ECindex: \"\"\" Computes the E and", "components \"\"\" _subset = self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis]", "and C index according to Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly:", "\"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self) -> xr.DataArray: \"\"\" Return the correction", "\"\"\" if self.anom_smooth_pcs is None: self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return", "if climatology is None: climatology = compute_climatology(self.sst_data, base_period) self.climatology = climatology if not", "\"\"\" return self._corr_factor @corr_factor.setter 
def corr_factor(self, corr_factor: List[int]) -> None: \"\"\" Set a", "if smooth is True: _pcs = xconvolve(_pcs, self._smooth_kernel, dim=\"time\") pc1 = _pcs.sel(mode=0) pc2", "= { \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)}, \"3\": {\"lat\": slice(-5, 5),", "str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] = [1, 2,", "by estimating the sign of known events for the E and C index.", "xr.DataArray: \"\"\" Return the first two principal components smoothed with the specified smooth_kernel", "_pcs.sel(mode=1) eindex = (pc1 - pc2) / (2 ** (1 / 2)) eindex.name", "index \"\"\" return self._compute_index() @property def ecindex_smooth(self) -> xr.Dataset: \"\"\" Return the first", "= None, base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel:", "first two principal components used in the computation of the E and C", "1], ): self.sst_data = sst_data self.base_period = base_period if climatology is None: climatology", ") @property def smooth_kernel(self) -> xr.DataArray: \"\"\" Return the smooth kernel used in", "ecindex(self) -> xr.Dataset: \"\"\" Return the first two principal components rotated, also known", "to Takahashi \"\"\" def __init__( self, sst_data: xr.DataArray, isanomaly: bool = False, climatology:", "(1 / 2)) cindex.name = \"C_index\" return xr.merge([eindex, cindex]) @property def corr_factor(self) ->", "clim_std ) self.anom_smooth_pcs = None def _corrected_pcs(self) -> xr.DataArray: \"\"\" Return the pcs", "import List, Optional, Tuple import numpy as np import xarray as xr from", "@property def pcs_smooth(self) -> xr.DataArray: \"\"\" Return the first two principal components smoothed", "the zones. 
\"\"\" zones = { \"12\": {\"lat\": slice(-10, 0), \"lon\": slice(270, 280)},", "the correction factor by estimating the sign of known events for the E", "= new_corr_factor def _compute_index(self, smooth: bool = False) -> xr.Dataset: \"\"\" Compute the", "the first two pcs \"\"\" kernel = np.array(smooth_kernel) self._smooth_kernel = xr.DataArray(kernel / kernel.sum(),", "El Niño Index for each of the zones. \"\"\" zones = { \"12\":", "self.anom_smooth_pcs = xconvolve( self._corrected_pcs(), self._smooth_kernel, dim=\"time\", ) return self.anom_smooth_pcs @property def ecindex(self) ->", "smooth kernel used in the first two pcs \"\"\" return self._smooth_kernel @smooth_kernel.setter def", "base_period: Tuple[str, str] = (\"1979-01-01\", \"2009-12-30\"), corr_factor: Optional[List[int]] = None, smooth_kernel: List[int] =", "pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2) / (2 ** (1 /", "as El Niño Index for each of the zones. \"\"\" zones = {", "xconvolve class ECindex: \"\"\" Computes the E and C index according to Takahashi", "= _pcs.sel(mode=0) pc2 = _pcs.sel(mode=1) eindex = (pc1 - pc2) / (2 **", "= self.sst_data.sortby(\"lat\").sel(lat=slice(-10, 10)) coslat = np.cos(np.deg2rad(_subset.lat.data)) wgts = np.sqrt(coslat)[..., np.newaxis] self.solver = Eof(_subset.sel(time=slice(*self.base_period))," ]
[]
[ "calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise,", "# SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp,", "Linear, PlainLinear, PlainBiasedLinear from .padding import Padding from .pool import PlainPool, AdaptivePool, Pool", "from .padding import Padding from .pool import PlainPool, AdaptivePool, Pool from .unary import", "ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d", "ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d from .flatten import Flatten2d from .grouped_conv", "import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d from .flatten import", "BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d from .flatten import Flatten2d from", "<NAME> # SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp,", ".binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d from .flatten", "Copyright 2020 <NAME> # SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter, BaseOp, CombinedOp,", "SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp", "PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d,", "GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear from .padding import Padding from", "PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import", 
"SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv", "OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd,", "from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary", "import GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear from .padding import Padding", "from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear from .padding", "PlainLinear, PlainBiasedLinear from .padding import Padding from .pool import PlainPool, AdaptivePool, Pool from", "Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear from", "Apache-2.0 from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from", ".grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear from .padding import", "CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from", "PlainBiasedLinear from .padding import Padding from .pool import PlainPool, AdaptivePool, Pool from .unary", "import PlainConv2d, Conv2d from .flatten import Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from", ".flatten import Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear,", ".linear import Linear, PlainLinear, PlainBiasedLinear from .padding import Padding from .pool import PlainPool,", "Conv2d from .flatten import Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import", "from .flatten import Flatten2d from 
.grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import Linear,", "import Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear", "BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd", "Padding from .pool import PlainPool, AdaptivePool, Pool from .unary import ReLU, ReLU6, UnaryElementwise", ".base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import", "ChannelwiseAdd from .conv import PlainConv2d, Conv2d from .flatten import Flatten2d from .grouped_conv import", "from .conv import PlainConv2d, Conv2d from .flatten import Flatten2d from .grouped_conv import GroupedConv2d,", "import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp, PolyOp, PolyTVMOp, ArgumentedOp from .binary import BinaryChannelwise,", "PlainGroupedConv2d from .linear import Linear, PlainLinear, PlainBiasedLinear from .padding import Padding from .pool", "from .linear import Linear, PlainLinear, PlainBiasedLinear from .padding import Padding from .pool import", "PlainConv2d, Conv2d from .flatten import Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d from .linear", "# Copyright 2020 <NAME> # SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter, BaseOp,", "2020 <NAME> # SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter, BaseOp, CombinedOp, SequenceOp,", "from .binary import BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d from", "BinaryChannelwise, BinaryElementwise, ElementwiseAdd, ChannelwiseAdd from .conv import PlainConv2d, Conv2d from .flatten import Flatten2d", "import Padding from .pool import PlainPool, AdaptivePool, Pool from .unary import ReLU, ReLU6,", "<reponame>ModelTC/pyvlova # Copyright 2020 <NAME> 
# SPDX-License-Identifier: Apache-2.0 from .base import calc_mode, OpParameter,", "import Linear, PlainLinear, PlainBiasedLinear from .padding import Padding from .pool import PlainPool, AdaptivePool,", ".padding import Padding from .pool import PlainPool, AdaptivePool, Pool from .unary import ReLU,", ".conv import PlainConv2d, Conv2d from .flatten import Flatten2d from .grouped_conv import GroupedConv2d, PlainGroupedConv2d" ]
[ "sys def main(args): runstring = \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring, shell=True) if", "main(args): runstring = \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring, shell=True) if __name__ ==", "def main(args): runstring = \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring, shell=True) if __name__", "runstring = \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring, shell=True) if __name__ == \"__main__\":", "import subprocess import sys def main(args): runstring = \"./run.sh \" + args[0] print(runstring)", "import sys def main(args): runstring = \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring, shell=True)", "<reponame>Mohan-Zhang-u/TextSummarization<filename>bash_script_executer.py import subprocess import sys def main(args): runstring = \"./run.sh \" + args[0]", "= \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring, shell=True) if __name__ == \"__main__\": main(sys.argv[1:])", "subprocess import sys def main(args): runstring = \"./run.sh \" + args[0] print(runstring) subprocess.call(runstring," ]
[ "@staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept':", "= 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type':", "<gh_stars>0 import json import urllib.parse import requests from lxml.html import fromstring from loguru", "import requests from lxml.html import fromstring from loguru import logger class Authenticator: def", "f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return", "return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded',", "params: params = {'ticket': self.get_service_ticket()} else: params = { 'pageSize': 200, **params, }", "*url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text)", "r.text def get(self, *url, **params): if not params: params = {'ticket': self.get_service_ticket()} else:", "}, ) return r.text def get(self, *url, **params): if not params: params =", "} r = None try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), )", "200, **params, } r = None try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params,", "apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def", "data={'service': 
'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return r.text", "import urllib.parse import requests from lxml.html import fromstring from loguru import logger class", "'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return r.text def get(self, *url,", "'Accept': 'text/plain', 'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post(", "except requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text) raise e r.encoding = 'utf-8'", "'User-Agent': 'python' }, ) return r.text def get(self, *url, **params): if not params:", "logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod", "'text/plain', 'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post( self.time_granting_ticket,", "headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return r.text def get(self,", "self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={", "= requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' },", "params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text) raise", "data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return 
fromstring(r.text).xpath('//form/@action')[0]", "def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r", "__init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r =", "'/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if r:", "get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent':", "'python' }, ) return r.text def get(self, *url, **params): if not params: params", "else: params = { 'pageSize': 200, **params, } r = None try: r", "if not params: params = {'ticket': self.get_service_ticket()} else: params = { 'pageSize': 200,", "'Accept': 'text/plain', 'User-Agent': 'python' }, ) return r.text def get(self, *url, **params): if", "= {'ticket': self.get_service_ticket()} else: params = { 'pageSize': 200, **params, } r =", "requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if", "params = { 'pageSize': 200, **params, } r = None try: r =", "'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service':", "def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain',", "'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] 
def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'},", "from loguru import logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url", "None try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError", "fromstring from loguru import logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey)", "'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded',", "{ 'pageSize': 200, **params, } r = None try: r = requests.get( '/'.join((self.base_url,", "= requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e)", "safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text) raise e", "'text/plain', 'User-Agent': 'python' }, ) return r.text def get(self, *url, **params): if not", "'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r", "**params, } r = None try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','),", ") return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type':", "apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 
'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post(", "headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self):", "fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept':", "'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r =", "get(self, *url, **params): if not params: params = {'ticket': self.get_service_ticket()} else: params =", "r = None try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status()", "'pageSize': 200, **params, } r = None try: r = requests.get( '/'.join((self.base_url, *url)),", "Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey):", "**params): if not params: params = {'ticket': self.get_service_ticket()} else: params = { 'pageSize':", "requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text) raise e r.encoding = 'utf-8' return", "import logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest'", "urllib.parse import requests from lxml.html import fromstring from loguru import logger class Authenticator:", "*url, **params): if not params: params = {'ticket': self.get_service_ticket()} else: params = {", "requests from lxml.html import fromstring from loguru import logger class Authenticator: def 
__init__(self,", "requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, )", "requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, )", "loguru import logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url =", "as e: logger.error(e) if r: print(r.text) raise e r.encoding = 'utf-8' return json.loads(r.text)", "{'ticket': self.get_service_ticket()} else: params = { 'pageSize': 200, **params, } r = None", "r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text) raise e r.encoding =", "r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python'", "r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as e:", "'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return r.text def", "from lxml.html import fromstring from loguru import logger class Authenticator: def __init__(self, apikey):", "try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except requests.exceptions.HTTPError as", "= { 'pageSize': 200, **params, } r = None try: r = requests.get(", "class Authenticator: def __init__(self, apikey): self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def", "lxml.html import 
fromstring from loguru import logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket", "self.time_granting_ticket = self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key',", "r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python'", "= None try: r = requests.get( '/'.join((self.base_url, *url)), params=urllib.parse.urlencode(params, safe=','), ) r.raise_for_status() except", ") r.raise_for_status() except requests.exceptions.HTTPError as e: logger.error(e) if r: print(r.text) raise e r.encoding", "self.get_service_ticket()} else: params = { 'pageSize': 200, **params, } r = None try:", "def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain',", "import fromstring from loguru import logger class Authenticator: def __init__(self, apikey): self.time_granting_ticket =", "not params: params = {'ticket': self.get_service_ticket()} else: params = { 'pageSize': 200, **params,", "'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return r.text def get(self, *url, **params):", "get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent':", "}, ) return fromstring(r.text).xpath('//form/@action')[0] def get_service_ticket(self): r = requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={", "import json import urllib.parse import requests from lxml.html import 
fromstring from loguru import", "= requests.post( self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' },", "= self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey':", ") return r.text def get(self, *url, **params): if not params: params = {'ticket':", "self.get_time_granting_ticket(apikey) self.base_url = 'https://uts-ws.nlm.nih.gov/rest' @staticmethod def get_time_granting_ticket(apikey): r = requests.post( f'https://utslogin.nlm.nih.gov/cas/v1/api-key', data={'apikey': apikey},", "def get(self, *url, **params): if not params: params = {'ticket': self.get_service_ticket()} else: params", "self.time_granting_ticket, data={'service': 'http://umlsks.nlm.nih.gov'}, headers={ 'Content-type': 'application/x-www-form-urlencoded', 'Accept': 'text/plain', 'User-Agent': 'python' }, ) return", "json import urllib.parse import requests from lxml.html import fromstring from loguru import logger", "return r.text def get(self, *url, **params): if not params: params = {'ticket': self.get_service_ticket()}", "params = {'ticket': self.get_service_ticket()} else: params = { 'pageSize': 200, **params, } r" ]
[]
[ "pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1]", "longitude_reference, ) usbl.sensor_string = sensor_string path = get_raw_folder(outpath / \"..\" / filepath) file_list", "[] for file in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for line in", "errors=\"ignore\") as nmea_file: for line in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) <", "file in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for line in nmea_file.readlines(): parts", "timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor,", "= mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, )", "mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy,", "mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone)", "read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference,", "= int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd, hour, mins, secs, timezone_offset )", "mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path = get_raw_folder(outpath / \"..\"", "information. \"\"\" import pynmea2 from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch,", "under the BSD 3-Clause License. 
See LICENSE.md file in the project root for", "int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10])", "= mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference =", "get_file_list(path) data_list = [] for file in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file:", "def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser meta data sensor_string = \"autosub\"", "data sensor_string = \"autosub\" category = category output_format = ftype if category ==", "file in the project root for full license information. \"\"\" import pynmea2 from", "# parser meta data sensor_string = \"autosub\" category = category output_format = ftype", "beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference", "for file in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for line in nmea_file.readlines():", "-*- coding: utf-8 -*- \"\"\" Copyright (c) 2020, University of Southampton All rights", "Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission,", "line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd =", "dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec", "reserved. Licensed under the BSD 3-Clause License. See LICENSE.md file in the project", "int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6])", "License. See LICENSE.md file in the project root for full license information. 
\"\"\"", "= get_file_list(path) data_list = [] for file in file_list: with file.open(\"r\", errors=\"ignore\") as", "Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path = get_raw_folder(outpath /", "continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\" \")[0]", "(c) 2020, University of Southampton All rights reserved. Licensed under the BSD 3-Clause", "import pynmea2 from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from", "= mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference =", "of Southampton All rights reserved. Licensed under the BSD 3-Clause License. See LICENSE.md", "= category output_format = ftype if category == Category.USBL: filepath = mission.usbl.filepath timezone", "int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4])", "in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for line in nmea_file.readlines(): parts =", "!= beacon_id: continue date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10])", "utf-8 -*- \"\"\" Copyright (c) 2020, University of Southampton All rights reserved. Licensed", "\")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2])", "= \"autosub\" category = category output_format = ftype if category == Category.USBL: filepath", "Southampton All rights reserved. Licensed under the BSD 3-Clause License. 
See LICENSE.md file", "mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude", "/ filepath) file_list = get_file_list(path) data_list = [] for file in file_list: with", "int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch(", "mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path", "filepath) file_list = get_file_list(path) data_list = [] for file in file_list: with file.open(\"r\",", "from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser", "epoch_time + msec / 1000 + timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg) data =", "mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs", "import date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype,", "with file.open(\"r\", errors=\"ignore\") as nmea_file: for line in nmea_file.readlines(): parts = line.split(\"\\t\") if", "for line in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) < 2: continue msg", "the project root for full license information. \"\"\" import pynmea2 from auv_nav.sensors import", "= int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs =", "Licensed under the BSD 3-Clause License. See LICENSE.md file in the project root", "3-Clause License. 
See LICENSE.md file in the project root for full license information.", "auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category,", "= str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour =", "/ \"..\" / filepath) file_list = get_file_list(path) data_list = [] for file in", "from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import", "root for full license information. \"\"\" import pynmea2 from auv_nav.sensors import Category, Usbl", "parts = line.split(\"\\t\") if len(parts) < 2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id)", "if int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy", "str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2])", "category == Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset", "continue date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm =", "sensor_string = \"autosub\" category = category output_format = ftype if category == Category.USBL:", "= get_raw_folder(outpath / \"..\" / filepath) file_list = get_file_list(path) data_list = [] for", "mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude", "= epoch_time + msec / 1000 + timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg) data", "project root for full license information. \"\"\" import pynmea2 from auv_nav.sensors import Category,", "license information. 
\"\"\" import pynmea2 from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import", "read_timezone from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): #", "full license information. \"\"\" import pynmea2 from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions", "parser meta data sensor_string = \"autosub\" category = category output_format = ftype if", "= mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl =", "int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy =", "BSD 3-Clause License. See LICENSE.md file in the project root for full license", "timezone_offset ) epoch_timestamp = epoch_time + msec / 1000 + timeoffset msg.timestamp =", "import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser meta data", "Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset", "hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour", "= date_time_to_epoch( yyyy, mm, dd, hour, mins, secs, timezone_offset ) epoch_timestamp = epoch_time", "= sensor_string path = get_raw_folder(outpath / \"..\" / filepath) file_list = get_file_list(path) data_list", "msec / 1000 + timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg) data = usbl.export(output_format) data_list.append(data)", "file.open(\"r\", errors=\"ignore\") as nmea_file: for line in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts)", "as nmea_file: for line in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) < 2:", "path = get_raw_folder(outpath / \"..\" / filepath) file_list = 
get_file_list(path) data_list = []", "category, ftype, outpath): # parser meta data sensor_string = \"autosub\" category = category", "get_raw_folder(outpath / \"..\" / filepath) file_list = get_file_list(path) data_list = [] for file", "in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) < 2: continue msg = pynmea2.parse(parts[1])", "= pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\" \")[0] hour_str =", "hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time", "All rights reserved. Licensed under the BSD 3-Clause License. See LICENSE.md file in", "category = category output_format = ftype if category == Category.USBL: filepath = mission.usbl.filepath", "= read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset,", "int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd,", "mm, dd, hour, mins, secs, timezone_offset ) epoch_timestamp = epoch_time + msec /", "get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser meta data sensor_string =", "parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser meta data sensor_string = \"autosub\" category", "date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath):", "= Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path = get_raw_folder(outpath", "import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder", ") usbl.sensor_string = sensor_string path = get_raw_folder(outpath / \"..\" / filepath) file_list =", "1000 + 
timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg) data = usbl.export(output_format) data_list.append(data) return data_list", "msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\" \")[0] hour_str", "beacon_id: continue date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm", "ftype, outpath): # parser meta data sensor_string = \"autosub\" category = category output_format", "category output_format = ftype if category == Category.USBL: filepath = mission.usbl.filepath timezone =", "timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl", "= int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd, hour, mins,", "= line.split(\"\\t\") if len(parts) < 2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) !=", "longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string =", "auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import get_file_list,", "output_format = ftype if category == Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone", "\"\"\" Copyright (c) 2020, University of Southampton All rights reserved. Licensed under the", "hour, mins, secs, timezone_offset ) epoch_timestamp = epoch_time + msec / 1000 +", "data_list = [] for file in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for", "LICENSE.md file in the project root for full license information. 
\"\"\" import pynmea2", "coding: utf-8 -*- \"\"\" Copyright (c) 2020, University of Southampton All rights reserved.", "\"\"\" import pynmea2 from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone", "get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser meta data sensor_string", "= ftype if category == Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id", "= int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm,", "secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd, hour,", "secs, timezone_offset ) epoch_timestamp = epoch_time + msec / 1000 + timeoffset msg.timestamp", "Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder def", "oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle, category, ftype, outpath): # parser meta", "timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference", "= line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd", "if len(parts) < 2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue", "usbl.sensor_string = sensor_string path = get_raw_folder(outpath / \"..\" / filepath) file_list = get_file_list(path)", "file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for line in nmea_file.readlines(): parts = line.split(\"\\t\")", "for full license information. 
\"\"\" import pynmea2 from auv_nav.sensors import Category, Usbl from", "< 2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str =", "= int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time =", "2020, University of Southampton All rights reserved. Licensed under the BSD 3-Clause License.", "in the project root for full license information. \"\"\" import pynmea2 from auv_nav.sensors", "if category == Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label", "ftype if category == Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id =", "# -*- coding: utf-8 -*- \"\"\" Copyright (c) 2020, University of Southampton All", "mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string", "/ 1000 + timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg) data = usbl.export(output_format) data_list.append(data) return", "= mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string", "outpath): # parser meta data sensor_string = \"autosub\" category = category output_format =", "file_list = get_file_list(path) data_list = [] for file in file_list: with file.open(\"r\", errors=\"ignore\")", "int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd, hour, mins, secs, timezone_offset ) epoch_timestamp", "filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset", "latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path = get_raw_folder(outpath / \"..\" / filepath)", "yyyy, mm, dd, hour, mins, secs, timezone_offset ) 
epoch_timestamp = epoch_time + msec", "= mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset = mission.usbl.timeoffset timezone_offset =", "latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference,", "usbl = Usbl( mission.usbl.std_factor, mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path =", "epoch_time = date_time_to_epoch( yyyy, mm, dd, hour, mins, secs, timezone_offset ) epoch_timestamp =", "== Category.USBL: filepath = mission.usbl.filepath timezone = mission.usbl.timezone beacon_id = mission.usbl.label timeoffset =", "int(hour_str[4:6]) msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd, hour, mins, secs,", "date_time_to_epoch( yyyy, mm, dd, hour, mins, secs, timezone_offset ) epoch_timestamp = epoch_time +", "line.split(\"\\t\") if len(parts) < 2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id:", "rights reserved. Licensed under the BSD 3-Clause License. See LICENSE.md file in the", "dd, hour, mins, secs, timezone_offset ) epoch_timestamp = epoch_time + msec / 1000", "\"autosub\" category = category output_format = ftype if category == Category.USBL: filepath =", "line in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) < 2: continue msg =", "nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) < 2: continue msg = pynmea2.parse(parts[1]) if", "Copyright (c) 2020, University of Southampton All rights reserved. 
Licensed under the BSD", "len(parts) < 2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str", "= int(date_str[0:2]) hour = int(hour_str[0:2]) mins = int(hour_str[2:4]) secs = int(hour_str[4:6]) msec =", ") epoch_timestamp = epoch_time + msec / 1000 + timeoffset msg.timestamp = epoch_timestamp", "+ msec / 1000 + timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg) data = usbl.export(output_format)", "the BSD 3-Clause License. See LICENSE.md file in the project root for full", "University of Southampton All rights reserved. Licensed under the BSD 3-Clause License. See", "from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab import get_file_list, get_raw_folder def parse_NOC_nmea(mission, vehicle,", "meta data sensor_string = \"autosub\" category = category output_format = ftype if category", "2: continue msg = pynmea2.parse(parts[1]) if int(msg.ref_station_id) != beacon_id: continue date_str = line.split(\"", "sensor_string path = get_raw_folder(outpath / \"..\" / filepath) file_list = get_file_list(path) data_list =", "= int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins =", "vehicle, category, ftype, outpath): # parser meta data sensor_string = \"autosub\" category =", "yyyy = int(date_str[6:10]) mm = int(date_str[3:5]) dd = int(date_str[0:2]) hour = int(hour_str[0:2]) mins", "= [] for file in file_list: with file.open(\"r\", errors=\"ignore\") as nmea_file: for line", "mins, secs, timezone_offset ) epoch_timestamp = epoch_time + msec / 1000 + timeoffset", "\"..\" / filepath) file_list = get_file_list(path) data_list = [] for file in file_list:", "-*- \"\"\" Copyright (c) 2020, University of Southampton All rights reserved. 
Licensed under", "epoch_timestamp = epoch_time + msec / 1000 + timeoffset msg.timestamp = epoch_timestamp usbl.from_nmea(msg)", "msec = int(hour_str[7:10]) epoch_time = date_time_to_epoch( yyyy, mm, dd, hour, mins, secs, timezone_offset", "mission.usbl.std_offset, latitude_reference, longitude_reference, ) usbl.sensor_string = sensor_string path = get_raw_folder(outpath / \"..\" /", "nmea_file: for line in nmea_file.readlines(): parts = line.split(\"\\t\") if len(parts) < 2: continue", "mission.usbl.timeoffset timezone_offset = read_timezone(timezone) latitude_reference = mission.origin.latitude longitude_reference = mission.origin.longitude usbl = Usbl(", "date_str = line.split(\" \")[0] hour_str = str(parts[1]).split(\",\")[1] yyyy = int(date_str[6:10]) mm = int(date_str[3:5])", "pynmea2 from auv_nav.sensors import Category, Usbl from auv_nav.tools.time_conversions import date_time_to_epoch, read_timezone from oplab", "See LICENSE.md file in the project root for full license information. \"\"\" import" ]
[]
[ ":return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试", "的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert", "\"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return:", "def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test()", "def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True)", ".set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test()", "\"unit\": None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188, \"primePrice\":", "\"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\":", "\"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\"", "\"price\": 188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\":", "'D1E338D6015630E3AFF2440F3CBBAFAD', 
'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def", "的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids", "\"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return:", "'$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code", "\"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462,", "\"remark\": None, \"openSN\": 0, \"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\":", "的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test()", "hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token", "GoodsInformation from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery", 
"hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单", "}, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情", "assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self):", "from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from", "的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单", "\"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0,", "ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date", "采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\"", "None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\"", "ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 
:return: \"\"\" bill_code = 'CD201905300017' storage_uid", "188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188,", ".set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return:", "def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids)", "\"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self):", "def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据", "PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\"", "\"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": None, \"total_money\":", "storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code,", "188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\",", "的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def 
_test_get_purchase_stock_token(self): \"\"\" 采购入库单", "storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单", "hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export", "的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分", "assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert", "\"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试", "{ \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\",", "_test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试", "\"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" },", "\"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": 
\"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1,", "'2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\"", "\"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType':", "\"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\"", "SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result()", "\\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情", "获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test()", "\\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return:", "\"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": 
\"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\":", "的单元测试 :return: \"\"\" data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\",", "from pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\"", "0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": None, \"total_money\": 188,", "import Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import", "import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import", "Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data = { \"task_id\": 3686347,", "StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock", "def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test()", "hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods", "hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill 
import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku", "\"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\":", "= 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid,", "test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726' GoodsInformationsku(goods_uid).test() if __name__ ==", "hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order from hupun.page.order_goods", "unittest from hupun.page.hupun_goods.goods_information import GoodsInformation from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq", "ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data = {", "= ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\"", "\"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\":", ".set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ 
.set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分", "import unittest from hupun.page.hupun_goods.goods_information import GoodsInformation from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import", "',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now()", "\"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return:", "\"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\":", "def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试", "的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert", "storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\"", "订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\", "supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ 
.set_end_time(Date.now().format()) \\", "的单元测试 :return: \"\"\" data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\":", ".set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD',", ":return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test()", "= { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\":", "import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import", "\"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ','", ":return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726'", "hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock", "'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\"", "def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data = { \"task_id\": 3686347, \"oper_uid\":", 
"hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock", "= '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name)", "18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\"", "的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test()", "PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def", ":return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self):", "None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\": None,", "'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self):", "import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import", 
"ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self):", "\"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\":", "bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name", "def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code = 'CD201905300017' storage_uid =", "from hupun.page.order import Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder from", "ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data = [ {", "PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\"", "的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def", "compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\"", "\"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\":", "\"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": 
\"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单", "import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import", "from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from", "storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return:", "\"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self):", ":return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test()", "\"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87,", "的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\"", "from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order from hupun.page.order_goods import OrderGoods from", "} ExportFileDownloadReq(data).test() def 
_test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code = 'CD201905300017'", "\"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\":", "import StatementExport from hupun.page.order import Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import", "进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\"", "StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试", "\"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def", "import OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import", "def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\"", "PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku", "_test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def", "None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": 
None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100,", "None, \"openSN\": 0, \"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None,", "'研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\", "= 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\", "进销存报表下载 的单元测试 :return: \"\"\" data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2,", "_test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分", "import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import", "'0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code =", "1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188,", "hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill", "188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def", "pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True)", "商品详情 的单元测试 :return: \"\"\" token = 
PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单", "data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\":", "\"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\"", "_test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) +", "] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token =", "hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order", "_test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def", "0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ]", "\"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\":", "\"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": None,", ":return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', 
'2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return:", "PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods", "\"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87, \"nums\": 1,", "\"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\"", "1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data = { \"task_id\":", "商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的", ".set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\"", "assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self):", ":return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def", "supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ 
.set_start_time(Date.now().plus_days(-60).format())", ":return: \"\"\" data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7,", "\"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\",", "OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder", "ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock", "_test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self):", "\"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\":", "bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data", "Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\"", "\"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert 
PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def", "\"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None,", "7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None,", "Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test()", "} ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token", "hupun.page.hupun_goods.goods_information import GoodsInformation from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query", "\"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return:", "\"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\":", "from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from", "token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\"", 
"PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\"", "\"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\":", "87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188,", "{ \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None,", "\"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids()", "_test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return:", "_test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\",", "\"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self):", "采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\"", ":return: \"\"\" data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\":", 
"hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house", "\"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format())", "\"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\":", "ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order from hupun.page.order_goods import OrderGoods", "1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test()", "from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from", "None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188, \"primePrice\": 188,", "None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0,", "import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import", "采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\"", "1, \"discount_rate\": 100, \"price\": 
188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0,", "订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单", "None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单", "'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format())", "\"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\":", "ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self):", "from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from", "data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\",", "\\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\"", "= '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data =", "\"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 
的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\"", "\"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test()", "进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time(", "的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载", "= [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\",", "PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku", "_test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息", "def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情", "hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase):", "Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format())", "采购入库单 
的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试 :return:", "采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\"", "\"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask',", "def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726' GoodsInformationsku(goods_uid).test() if __name__", "188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0,", "\"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test() def _test_get_goods_information(self): \"\"\" 商品信息 的单元测试", "\"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\":", "'$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return:", "采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name =", "SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return:", "2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4,", "\"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 
1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self):", "的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert", "\"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": None, \"total_money\": 188, \"pay_type\":", "import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order from hupun.page.order_goods import", "StatementExport from hupun.page.order import Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder", "\"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\":", "None, \"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\"", "\"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726' GoodsInformationsku(goods_uid).test() if __name__ == '__main__':", "Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert", "def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test()", "\"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } 
ExportFileDownloadReq(data).test() def", "\\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576',", "from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from", ":return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids =", "\"download_time\": \"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId':", "GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726' GoodsInformationsku(goods_uid).test() if", "\"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": None,", "from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from", "StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date,", "\"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": 
None, \"total_money\": 188, \"pay_type\": None,", "\"\"\" data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\":", "\"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name", "\"2019-06-11T12:02:50Z\", \"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0',", "supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情", "\"count\": 1462, \"download_times\": 4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', }", "\"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\":", "PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids =", "import SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分", "\"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self):", "\"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\"", "100, \"price\": 188, \"pivtLast\": 188, \"primePrice\": 188, 
\"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\",", "def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def", "的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token", "\"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def", "assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出 的单元测试 :return: \"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids", "0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None,", "'测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def", "采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\",", "采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\"", "采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\"", "\"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data = [ { \"goodsUid\": 
\"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\":", "\"unit_size\": 1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\":", "\"openSN\": 0, \"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\":", "\"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return:", "from hupun.page.purchase_order_goods import PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from", "PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill", "+ ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date =", "OrderGoods('A4380F4D6D153825AB891D632C341A45', 'D1E338D6015630E3AFF2440F3CBBAFAD', 'TB328906912208400576', '2019-01-17T02:49:20Z').test() def _test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test()", "\"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\":", "商品信息 的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid", "PurchaseOrderGoods from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import 
PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse", "\"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\", \"count\":", ".test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913'", "\"\"\" bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160'", "的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓'", "GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport", "ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken", "def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): \"\"\" 进销存表报导出", "Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods import PurchaseOrderGoods", "_test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\",", "import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from 
hupun.page.sync_module.get_purchase_stock_token import", "\\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def _test_order_goods(self): \"\"\" 订单商品详情 的测试部分 :return: \"\"\" assert OrderGoods('A4380F4D6D153825AB891D632C341A45',", "[ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\":", "_test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情 的单元测试 :return: \"\"\" bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2'", "的单元测试 :return: \"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid =", "\"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单详情", "\"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\": 0, \"expiration\":", "storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid,", "0, \"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None,", "','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date", "'CD201905300017' 
storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777'", "None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, {", "= Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data", "3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\": \"2019-06-11T12:02:50Z\",", "def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data = [ { \"goodsUid\":", "'4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return: \"\"\" data = [", "\"\"\" storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self):", "\"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\", \"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\":", "\"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }", "hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date", "= StoreHouse().get_storage_ids() storage_uids 
= ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询", "_test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\", "\"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单", "的单元测试 :return: \"\"\" bill_code = 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid", "188, \"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\",", "\\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试", "ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order", "\\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid =", "\"specCode\": \"1919N00002W404\", \"specName\": None, \"unit_size\": 1, \"pchs_unit\": None, \"unit\": None, \"shouldNums\": 87, \"nums\":", "_test_purchase_order(self): \"\"\" 采购订单 的测试部分 :return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分", "\"\"\" 采购入库单查看详情数据 的测试部分 :return: \"\"\" assert PurchaseStoreOrderGoods('35414A5328FD3F66B3279E1ACC1E5E47').test() def _test_statement_export(self): 
\"\"\" 进销存表报导出 的单元测试 :return:", "\"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\": 2, \"export_type\": 7, \"exportCaption\": \"进销存报表\", \"create_time\": \"2019-06-10T19:12:24Z\", \"download_time\":", "188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\",", "4, \"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self):", "\"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" }, { \"$dataType\":", "'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name = '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name,", ":return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-1).to_day_end().format()).test() Order(True) \\ .set_start_time(Date.now().plus_days(-120).to_day_start().format()) \\ .set_end_time(Date.now().plus_days(-120).to_day_end().format()).test() def", "的提交入库变动的 的单元测试 :return: \"\"\" data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\":", "from hupun.page.sync_module.confirm_purchase_stock import ConfirmPurBillStock from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from", "import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase): def", ":return: \"\"\" bill_code = 'CD201905300017' 
storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid =", "= PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return: \"\"\" PurchaseStockToken().test()", "_test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def", ":return: \"\"\" assert PurchaseOrder(True).set_start_time(Date.now().plus_days(-1).format()).test() def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test()", "商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726' GoodsInformationsku(goods_uid).test() if __name__ == '__main__': unittest.main()", "_test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self): \"\"\" 采购入库单查看详情数据 的测试部分", "{ \"$dataType\": \"v:purchase.stock$dtStockBillDetail\" } ] SubmitPurBillStock(data).test() def _test_confirm_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试", "from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self): \"\"\"", "None, \"unit\": None, \"shouldNums\": 87, \"nums\": 1, \"discount_rate\": 100, \"price\": 188, \"pivtLast\": 188,", "的单元测试 :return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试", "def _test_purchase_order_goods(self): \"\"\" 采购订单查看详情 的测试部分 :return: \"\"\" assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def 
_test_purchase_store_order(self): \"\"\" 采购入库单", "from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from", "from hupun.page.sync_module.get_purchase_stock_token import PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date class", "from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order from", "PurchaseStockToken from hupun.page.sync_module.submit_purchase_stock import SubmitPurBillStock from pyspider.helper.date import Date class Test(unittest.TestCase): def _test_order(self):", "import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import", "= 'CD201905300017' storage_uid = 'FBA807A72474376E8CFBBE9848F271B2' storage_name = '研发测试仓' supplier_uid = 'EDF923722E993179829C929468693160' supplier_name =", ":return: \"\"\" bill_uid = '4E914B16058C3D02A42CE6479666A913' ChoosePurBillSku(bill_uid).test() def _test_submit_purchase_stock(self): \"\"\" 采购入库单 的提交入库变动的 的单元测试 :return:", "from hupun.page.hupun_goods.goods_information import GoodsInformation from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from", "assert PurchaseOrderGoods('189C28D94B3D390191F1DD1723F9544E').test() def _test_purchase_store_order(self): \"\"\" 采购入库单 的测试部分 :return: \"\"\" assert PurchaseStoreOrder(True).set_start_time(Date.now().to_day_start().format()).test() def _test_purchase_store_order_goods(self):", "\"\"\" token = PurchaseStockToken().get_result() ConfirmPurBillStock(token).test() 
def _test_get_purchase_stock_token(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 获取token 的单元测试 :return:", "\"\"\" data = [ { \"goodsUid\": \"4AFB3148514C3FA99F332B05AAEC0A92\", \"goodsName\": \"测试--想念\", \"specUid\": \"1000577C001E3D14A8041BC5FD4CCDCE\", \"pic1\": \"http://test.image.yourdream.cc/ai-admin/ffa0d4ab8f89e8a6f79b0239f906a6b7.png\",", "hupun.page.order import Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order import PurchaseOrder from hupun.page.purchase_order_goods", "= '测试777777' ChoosePurBill(bill_code, storage_uid, storage_name, supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test()", "\"10000\", \"remark\": None, \"openSN\": 0, \"expiration\": None, \"total_money\": 188, \"pay_type\": None, \"pchs_advance_balance\": 18128,", "\"oper_nick\": None, \"file_path\": \"export/excel/D1E338D6015630E3AFF2440F3CBBAFAD/进销存报表20190610191250_0(3686347).xlsx\", '$dataType': 'dtExportTask', '$entityId': '0', } ExportFileDownloadReq(data).test() def _test_choose_purchase_bill(self): \"\"\"", "\"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\": 0, \"pchs_detail_uid\": \"9DC3D695B16A3160BAEDD6E249B01C25\", \"pchs_detail_index\": \"10000\", \"remark\": None, \"openSN\":", "import StoreHouse from hupun.page.sync_module.choose_purchase_bill import ChoosePurBill from hupun.page.sync_module.choose_purchase_bill_sku import ChoosePurBillSku from hupun.page.sync_module.confirm_purchase_stock import", "\"pchs_advance_balance\": 18128, \"stock_advance_balance\": None, \"settle_advance_balance\": None, \"tax\": 0, \"net_price\": 188, \"sn\": None, \"$dataType\":", "import Date class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\", "Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( 
Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data =", "from hupun.page.purchase_store_order import PurchaseStoreOrder from hupun.page.purchase_store_order_goods import PurchaseStoreOrderGoods from hupun_slow_crawl.model.es.store_house import StoreHouse from", "class Test(unittest.TestCase): def _test_order(self): \"\"\" 订单 的测试部分 :return: \"\"\" Order(True) \\ .set_start_time(Date.now().plus_days(-1).to_day_start().format()) \\", "\"\"\" 进销存报表下载 的单元测试 :return: \"\"\" data = { \"task_id\": 3686347, \"oper_uid\": \"9459514BF68F3C0A84343938A2CD7D75\", \"status\":", "\"\"\" GoodsInformation().test() def test_get_goods_information_sku(self): \"\"\" 商品信息sku 的单元测试 :return: \"\"\" goods_uid = 'C59933D09A893FDBB2FE8BB9BDD5E726' GoodsInformationsku(goods_uid).test()", "import GoodsInformation from hupun.page.hupun_goods.goods_information_sku import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import", "Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\" 进销存报表导出记录查询 的单元测试 :return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time(", "\"pivtLast\": 188, \"primePrice\": 188, \"base_price\": 188, \"tax_rate\": 0, \"pchs_bill_uid\": \"483FAB78DF98341C8A7E0F16577E4F21\", \"pchs_bill_code\": \"CD201905300017\", \"appointBillType\":", "import GoodsInformationsku from hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import", "storage_ids = StoreHouse().get_storage_ids() storage_uids = ','.join(storage_ids) + ',' StatementExport(storage_uids).set_start_time(Date.now().plus_days(-1).format()).set_end_time( 
Date.now().plus_days(-1).format()).test() def _test_statement_task_query(self): \"\"\"", "hupun.page.in_sale_store_table.export_file_download_req import ExportFileDownloadReq from hupun.page.in_sale_store_table.export_task_query import ExportTaskQuery from hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order", ":return: \"\"\" compare_date = Date.now() ExportTaskQuery(compare_date, 1462).set_start_time(Date.now().plus_days(-7).format()).set_end_time( Date.now().format()).set_delay_seconds(1).test() def _test_statement_file_download(self): \"\"\" 进销存报表下载 的单元测试", ".set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单 商品详情 的单元测试 :return: \"\"\" bill_uid", "supplier_uid, supplier_name) \\ .set_start_time(Date.now().plus_days(-60).format()) \\ .set_end_time(Date.now().format()) \\ .test() def _test_choose_purchase_bill_sku(self): \"\"\" 采购入库单 的选择采购订单部分的采购订单", "hupun.page.in_sale_store_table.table_export import StatementExport from hupun.page.order import Order from hupun.page.order_goods import OrderGoods from hupun.page.purchase_order" ]
[ "regular standard points. Made by a <NAME>, <NAME> for a School project at", "have five dices with six sides, you roll the set of dices, and", "sides, you roll the set of dices, and check if you score bonus", "for a School project at Hetic. \"\"\" __version__ = \"0.1.5\" from .main import", ".main import Borre as Borre from .dice import Dice as Dice from .player", "game maker, game rules are simple, you usually have five dices with six", "dead simple Farkle dice game implementation and game maker, game rules are simple,", "Dice as Dice from .player import Player as Player from .score import Score", "Borre from .dice import Dice as Dice from .player import Player as Player", "and check if you score bonus or regular standard points. Made by a", "you score bonus or regular standard points. Made by a <NAME>, <NAME> for", "by a <NAME>, <NAME> for a School project at Hetic. \"\"\" __version__ =", "from .main import Borre as Borre from .dice import Dice as Dice from", "simple, you usually have five dices with six sides, you roll the set", "project at Hetic. \"\"\" __version__ = \"0.1.5\" from .main import Borre as Borre", "usually have five dices with six sides, you roll the set of dices,", "five dices with six sides, you roll the set of dices, and check", "Made by a <NAME>, <NAME> for a School project at Hetic. \"\"\" __version__", "rules are simple, you usually have five dices with six sides, you roll", "import Dice as Dice from .player import Player as Player from .score import", "\"\"\" Borre is a dead simple Farkle dice game implementation and game maker,", "the set of dices, and check if you score bonus or regular standard", "of dices, and check if you score bonus or regular standard points. Made", "a <NAME>, <NAME> for a School project at Hetic. \"\"\" __version__ = \"0.1.5\"", "<NAME> for a School project at Hetic. 
\"\"\" __version__ = \"0.1.5\" from .main", "as Borre from .dice import Dice as Dice from .player import Player as", "from .dice import Dice as Dice from .player import Player as Player from", "is a dead simple Farkle dice game implementation and game maker, game rules", "at Hetic. \"\"\" __version__ = \"0.1.5\" from .main import Borre as Borre from", "dice game implementation and game maker, game rules are simple, you usually have", "six sides, you roll the set of dices, and check if you score", "roll the set of dices, and check if you score bonus or regular", "dices, and check if you score bonus or regular standard points. Made by", "import Borre as Borre from .dice import Dice as Dice from .player import", ".dice import Dice as Dice from .player import Player as Player from .score", "and game maker, game rules are simple, you usually have five dices with", "if you score bonus or regular standard points. Made by a <NAME>, <NAME>", "you usually have five dices with six sides, you roll the set of", "<NAME>, <NAME> for a School project at Hetic. \"\"\" __version__ = \"0.1.5\" from", "Borre is a dead simple Farkle dice game implementation and game maker, game", "Farkle dice game implementation and game maker, game rules are simple, you usually", "check if you score bonus or regular standard points. Made by a <NAME>,", "\"0.1.5\" from .main import Borre as Borre from .dice import Dice as Dice", "set of dices, and check if you score bonus or regular standard points.", "\"\"\" __version__ = \"0.1.5\" from .main import Borre as Borre from .dice import", "Borre as Borre from .dice import Dice as Dice from .player import Player", "bonus or regular standard points. 
Made by a <NAME>, <NAME> for a School", "Dice from .player import Player as Player from .score import Score as Score", "dices with six sides, you roll the set of dices, and check if", "you roll the set of dices, and check if you score bonus or", "__version__ = \"0.1.5\" from .main import Borre as Borre from .dice import Dice", "simple Farkle dice game implementation and game maker, game rules are simple, you", "points. Made by a <NAME>, <NAME> for a School project at Hetic. \"\"\"", "= \"0.1.5\" from .main import Borre as Borre from .dice import Dice as", "game implementation and game maker, game rules are simple, you usually have five", "implementation and game maker, game rules are simple, you usually have five dices", "a dead simple Farkle dice game implementation and game maker, game rules are", "are simple, you usually have five dices with six sides, you roll the", "standard points. Made by a <NAME>, <NAME> for a School project at Hetic.", "Hetic. \"\"\" __version__ = \"0.1.5\" from .main import Borre as Borre from .dice", "School project at Hetic. \"\"\" __version__ = \"0.1.5\" from .main import Borre as", "as Dice from .player import Player as Player from .score import Score as", "or regular standard points. Made by a <NAME>, <NAME> for a School project", "score bonus or regular standard points. Made by a <NAME>, <NAME> for a", "maker, game rules are simple, you usually have five dices with six sides,", "with six sides, you roll the set of dices, and check if you", "a School project at Hetic. \"\"\" __version__ = \"0.1.5\" from .main import Borre", "game rules are simple, you usually have five dices with six sides, you" ]
[ "- Network traffic analyzer # By <NAME> <<EMAIL>> # Copyright 1998 <NAME> #", "<NAME> (See AUTHORS file) # # Verifies last commit with clang-check (like scan-build)", "\"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE in $COMMIT_FILES; do", "(See AUTHORS file) # # Verifies last commit with clang-check (like scan-build) for", "# Copyright 2018, <NAME> (See AUTHORS file) # # Verifies last commit with", "Network traffic analyzer # By <NAME> <<EMAIL>> # Copyright 1998 <NAME> # #", "-f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE in $COMMIT_FILES; do clang-check -analyze ../$FILE", "Dish # # Wireshark - Network traffic analyzer # By <NAME> <<EMAIL>> #", "commit with clang-check (like scan-build) for Petri Dish # # Wireshark - Network", "# # Wireshark - Network traffic analyzer # By <NAME> <<EMAIL>> # Copyright", "| grep -v \"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE", "grep -v \"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE in", "analyzer # By <NAME> <<EMAIL>> # Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later", "# Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached", "1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status HEAD^", "HEAD^ | grep -v \"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\" ) for", "# By <NAME> <<EMAIL>> # Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later #", "scan-build) for Petri Dish # # Wireshark - Network traffic analyzer # By", "Petri Dish # # Wireshark - Network traffic analyzer # By <NAME> <<EMAIL>>", "file) # # Verifies last commit with clang-check (like scan-build) for Petri Dish", "<<EMAIL>> # Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index", "| grep \"\\\\.c$\\|cpp$\" ) for FILE in $COMMIT_FILES; do clang-check -analyze ../$FILE done", "--cached --name-status HEAD^ | grep -v \"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\"", "cut 
-f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE in $COMMIT_FILES; do clang-check -analyze", "# Verifies last commit with clang-check (like scan-build) for Petri Dish # #", "last commit with clang-check (like scan-build) for Petri Dish # # Wireshark -", "--name-status HEAD^ | grep -v \"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\" )", "Copyright 2018, <NAME> (See AUTHORS file) # # Verifies last commit with clang-check", "<NAME> <<EMAIL>> # Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git", "2018, <NAME> (See AUTHORS file) # # Verifies last commit with clang-check (like", "Verifies last commit with clang-check (like scan-build) for Petri Dish # # Wireshark", "#!/bin/sh # Copyright 2018, <NAME> (See AUTHORS file) # # Verifies last commit", "# Wireshark - Network traffic analyzer # By <NAME> <<EMAIL>> # Copyright 1998", "for Petri Dish # # Wireshark - Network traffic analyzer # By <NAME>", "By <NAME> <<EMAIL>> # Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$(", "<NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ |", "GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep -v \"^D\" |", "AUTHORS file) # # Verifies last commit with clang-check (like scan-build) for Petri", "COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep -v \"^D\" | cut -f2", "| cut -f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE in $COMMIT_FILES; do clang-check", "clang-check (like scan-build) for Petri Dish # # Wireshark - Network traffic analyzer", "git diff-index --cached --name-status HEAD^ | grep -v \"^D\" | cut -f2 |", "-v \"^D\" | cut -f2 | grep \"\\\\.c$\\|cpp$\" ) for FILE in $COMMIT_FILES;", "diff-index --cached --name-status HEAD^ | grep -v \"^D\" | cut -f2 | grep", "traffic analyzer # By <NAME> <<EMAIL>> # Copyright 1998 <NAME> # # SPDX-License-Identifier:", "Wireshark - Network traffic analyzer # By <NAME> <<EMAIL>> # 
Copyright 1998 <NAME>", "with clang-check (like scan-build) for Petri Dish # # Wireshark - Network traffic", "Copyright 1998 <NAME> # # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status", "<filename>resources/Wireshark/WiresharkDissectorFoo/tools/validate-clang-check.py #!/bin/sh # Copyright 2018, <NAME> (See AUTHORS file) # # Verifies last", "# COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep -v \"^D\" | cut", "(like scan-build) for Petri Dish # # Wireshark - Network traffic analyzer #", "# # SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep", "SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep -v \"^D\"", "# SPDX-License-Identifier: GPL-2.0-or-later # COMMIT_FILES=$( git diff-index --cached --name-status HEAD^ | grep -v", "# # Verifies last commit with clang-check (like scan-build) for Petri Dish #" ]
[ "self.date_to and self.date_from >= self.date_to: msg = _('%(date_from)s have to be before %(date_to)s.')", "= models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class", "django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4,", "= True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def", "models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from and", "= models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from and self.date_to and self.date_from >=", "auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract = True", "as _ from django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta: abstract = True", "date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if", "= models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from", "Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified", "to be before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name } raise 
ValidationError({'date_from':", ">= self.date_to: msg = _('%(date_from)s have to be before %(date_to)s.') % { 'date_from':", "be before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name } raise ValidationError({'date_from': msg,", "from django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta: abstract = True uuid =", "class Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event", "before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name } raise ValidationError({'date_from': msg, 'date_to':", "_ from django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta: abstract = True uuid", "import models from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError class", "models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel):", "django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError", "def clean(self): if self.date_from and self.date_to and self.date_from >= self.date_to: msg = _('%(date_from)s", "<filename>fit2gether_core/models/abstract.py import uuid from django.db import models from django.utils.translation import ugettext_lazy as _", "class FromToModel(Fit2getherModel): class Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to", "blank=True) def clean(self): if self.date_from and self.date_to and self.date_from >= self.date_to: msg =", "have to be before 
%(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name } raise", "import uuid from django.db import models from django.utils.translation import ugettext_lazy as _ from", "blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from and self.date_to and", "FromToModel(Fit2getherModel): class Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to =", "modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'),", "import ValidationError class Fit2getherModel(models.Model): class Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False)", "ends'), blank=True) def clean(self): if self.date_from and self.date_to and self.date_from >= self.date_to: msg", "import ugettext_lazy as _ from django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta: abstract", "from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import", "models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract =", "and self.date_from >= self.date_to: msg = _('%(date_from)s have to be before %(date_to)s.') %", "self.date_to: msg = _('%(date_from)s have to be before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name,", "class Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True)", "self.date_from >= self.date_to: msg = _('%(date_from)s have to be before %(date_to)s.') % {", "uuid = models.UUIDField(default=uuid.uuid4, 
editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True)", "msg = _('%(date_from)s have to be before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to':", "date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta:", "= models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract", "Fit2getherModel(models.Model): class Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'),", "True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self):", "abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified =", "= _('%(date_from)s have to be before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name", "and self.date_to and self.date_from >= self.date_to: msg = _('%(date_from)s have to be before", "= True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last", "auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True)", "models from django.utils.translation import ugettext_lazy as _ from 
django.core.exceptions import ValidationError class Fit2getherModel(models.Model):", "django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta:", "date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from and self.date_to and self.date_from", "Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'),", "clean(self): if self.date_from and self.date_to and self.date_from >= self.date_to: msg = _('%(date_from)s have", "ValidationError class Fit2getherModel(models.Model): class Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created", "abstract = True date_from = models.DateTimeField(verbose_name=_('Event starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True)", "ugettext_lazy as _ from django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class Meta: abstract =", "self.date_from and self.date_to and self.date_from >= self.date_to: msg = _('%(date_from)s have to be", "= models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract = True date_from =", "date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract = True date_from", "models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class Meta: abstract = True date_from = models.DateTimeField(verbose_name=_('Event", "%(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name } raise ValidationError({'date_from': msg, 'date_to': msg})", "from django.utils.translation import ugettext_lazy as _ from 
django.core.exceptions import ValidationError class Fit2getherModel(models.Model): class", "True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'),", "_('%(date_from)s have to be before %(date_to)s.') % { 'date_from': self._meta.get_field('date_from').verbose_name, 'date_to': self._meta.get_field('date_to').verbose_name }", "class Fit2getherModel(models.Model): class Meta: abstract = True uuid = models.UUIDField(default=uuid.uuid4, editable=False) date_created =", "models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from and self.date_to and self.date_from >= self.date_to:", "editable=False) date_created = models.DateTimeField(verbose_name=_('Created'), auto_now_add=True) date_modified = models.DateTimeField(verbose_name=_('Last modified'), auto_now=True) class FromToModel(Fit2getherModel): class", "if self.date_from and self.date_to and self.date_from >= self.date_to: msg = _('%(date_from)s have to", "uuid from django.db import models from django.utils.translation import ugettext_lazy as _ from django.core.exceptions", "starts'), blank=True) date_to = models.DateTimeField(verbose_name=_('Event ends'), blank=True) def clean(self): if self.date_from and self.date_to" ]
[ "l[i] = a count = 0 for i in range(n): for j in", "l = list(map(int,input().split(', '))) for i in range(len(l)): num = l[i] a =", "= 0 for i in range(n): for j in range(i,n): if l[i]>l[j]: count", "list(map(int,input().split(', '))) for i in range(len(l)): num = l[i] a = 0 while", "a = 0 while num>0: a += num%6 num = num//6 l[i] =", "+= num%6 num = num//6 l[i] = a count = 0 for i", "= a count = 0 for i in range(n): for j in range(i,n):", "num//6 l[i] = a count = 0 for i in range(n): for j", "= list(map(int,input().split(', '))) for i in range(len(l)): num = l[i] a = 0", "num = num//6 l[i] = a count = 0 for i in range(n):", "= num//6 l[i] = a count = 0 for i in range(n): for", "in range(len(l)): num = l[i] a = 0 while num>0: a += num%6", "l[i] a = 0 while num>0: a += num%6 num = num//6 l[i]", "= 0 while num>0: a += num%6 num = num//6 l[i] = a", "while num>0: a += num%6 num = num//6 l[i] = a count =", "int(input()) l = list(map(int,input().split(', '))) for i in range(len(l)): num = l[i] a", "0 for i in range(n): for j in range(i,n): if l[i]>l[j]: count +=", "num>0: a += num%6 num = num//6 l[i] = a count = 0", "i in range(n): for j in range(i,n): if l[i]>l[j]: count += 1 print(count)", "= int(input()) l = list(map(int,input().split(', '))) for i in range(len(l)): num = l[i]", "= l[i] a = 0 while num>0: a += num%6 num = num//6", "count = 0 for i in range(n): for j in range(i,n): if l[i]>l[j]:", "'))) for i in range(len(l)): num = l[i] a = 0 while num>0:", "num = l[i] a = 0 while num>0: a += num%6 num =", "a count = 0 for i in range(n): for j in range(i,n): if", "for i in range(len(l)): num = l[i] a = 0 while num>0: a", "range(len(l)): num = l[i] a = 0 while num>0: a += num%6 num", "0 while num>0: a += num%6 num = num//6 l[i] = a count", "a += num%6 num = num//6 l[i] = a count = 0 for", "for i in range(n): for j in range(i,n): if l[i]>l[j]: count += 1", "i in range(len(l)): num = l[i] a = 0 while num>0: a +=", "n = int(input()) l = list(map(int,input().split(', '))) 
for i in range(len(l)): num =", "num%6 num = num//6 l[i] = a count = 0 for i in" ]
[ "import migrations, models class Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'), ] operations =", "by Django 3.0.8 on 2020-07-29 00:27 from django.db import migrations, models class Migration(migrations.Migration):", "Django 3.0.8 on 2020-07-29 00:27 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "# Generated by Django 3.0.8 on 2020-07-29 00:27 from django.db import migrations, models", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'), ] operations", "class Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'), ] operations = [ migrations.AlterField( model_name='execution',", "Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'), ] operations = [ migrations.AlterField( model_name='execution', name='archived',", "2020-07-29 00:27 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('log_api',", "dependencies = [ ('log_api', '0008_auto_20200728_2126'), ] operations = [ migrations.AlterField( model_name='execution', name='archived', field=models.BooleanField(default=False,", "migrations, models class Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'), ] operations = [", "('log_api', '0008_auto_20200728_2126'), ] operations = [ migrations.AlterField( model_name='execution', name='archived', field=models.BooleanField(default=False, verbose_name='Archived'), ), ]", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'), ]", "00:27 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('log_api', '0008_auto_20200728_2126'),", "models class Migration(migrations.Migration): dependencies = [ ('log_api', 
'0008_auto_20200728_2126'), ] operations = [ migrations.AlterField(", "3.0.8 on 2020-07-29 00:27 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "[ ('log_api', '0008_auto_20200728_2126'), ] operations = [ migrations.AlterField( model_name='execution', name='archived', field=models.BooleanField(default=False, verbose_name='Archived'), ),", "Generated by Django 3.0.8 on 2020-07-29 00:27 from django.db import migrations, models class", "on 2020-07-29 00:27 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "= [ ('log_api', '0008_auto_20200728_2126'), ] operations = [ migrations.AlterField( model_name='execution', name='archived', field=models.BooleanField(default=False, verbose_name='Archived')," ]
[ "= [ \"It is certain\", \"It is decidedly so\", \"Without a doubt\", \"Yes", "doubt\", \"Yes definitely\", \"You may rely on it\", \"As I see it, yes\",", "\"Better not tell you now\", \"Cannot predict now\", \"Concentrate and ask again\", \"Don't", "tell you now\", \"Cannot predict now\", \"Concentrate and ask again\", \"Don't count on", "buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event if", "pearl, config): self.pearl = pearl self.hangouts = self.pearl.hangouts self.config = config self.buildHandle() def", "\"My sources say no\", \"Outlook not so good\", \"Very doubtful\" ] def __init__(self,", "so good\", \"Very doubtful\" ] def __init__(self, pearl, config): self.pearl = pearl self.hangouts", "build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event", "ask again\", \"Don't count on it\", \"My reply is no\", \"My sources say", "is no\", \"My sources say no\", \"Outlook not so good\", \"Very doubtful\" ]", "no\", \"My sources say no\", \"Outlook not so good\", \"Very doubtful\" ] def", "self.buildHandle() def build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if", "= random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation) def load(pearl, config): return EightBallSession(pearl,", "def build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update):", "try again\", \"Ask again 
later\", \"Better not tell you now\", \"Cannot predict now\",", "import asyncio, random import nacre class EightBallSession: answers = [ \"It is certain\",", "messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, event): message = random.choice(self.answers) conversation =", "may rely on it\", \"As I see it, yes\", \"Most likely\", \"Outlook good\",", "\"Outlook not so good\", \"Very doubtful\" ] def __init__(self, pearl, config): self.pearl =", "nacre class EightBallSession: answers = [ \"It is certain\", \"It is decidedly so\",", "handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def", "await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event)", "= pearl self.hangouts = self.pearl.hangouts self.config = config self.buildHandle() def build(self): pass def", "random import nacre class EightBallSession: answers = [ \"It is certain\", \"It is", "certain\", \"It is decidedly so\", \"Without a doubt\", \"Yes definitely\", \"You may rely", "see it, yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs point to yes\", \"Reply", "def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event", "nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, event):", "event = update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, 
event): message", "\"As I see it, yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs point to", "again\", \"Don't count on it\", \"My reply is no\", \"My sources say no\",", "respond(self, event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation) def load(pearl,", "if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, event): message = random.choice(self.answers) conversation", "self.hangouts = self.pearl.hangouts self.config = config self.buildHandle() def build(self): pass def buildHandle(self): messageFilter", "on it\", \"As I see it, yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs", "messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event):", "you now\", \"Cannot predict now\", \"Concentrate and ask again\", \"Don't count on it\",", "\"My reply is no\", \"My sources say no\", \"Outlook not so good\", \"Very", "doubtful\" ] def __init__(self, pearl, config): self.pearl = pearl self.hangouts = self.pearl.hangouts self.config", "yes\", \"Reply hazy try again\", \"Ask again later\", \"Better not tell you now\",", "again later\", \"Better not tell you now\", \"Cannot predict now\", \"Concentrate and ask", "answers = [ \"It is certain\", \"It is decidedly so\", \"Without a doubt\",", "\"It is certain\", \"It is decidedly so\", \"Without a doubt\", \"Yes definitely\", \"You", "rely on it\", \"As I see it, yes\", \"Most likely\", \"Outlook good\", \"Yes\",", "\"Yes\", \"Signs point to yes\", \"Reply hazy try again\", \"Ask again later\", \"Better", "now\", \"Concentrate and ask again\", \"Don't count on it\", \"My reply is no\",", "self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, 
event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await", "reply is no\", \"My sources say no\", \"Outlook not so good\", \"Very doubtful\"", "yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs point to yes\", \"Reply hazy try", "a doubt\", \"Yes definitely\", \"You may rely on it\", \"As I see it,", "[ \"It is certain\", \"It is decidedly so\", \"Without a doubt\", \"Yes definitely\",", "good\", \"Yes\", \"Signs point to yes\", \"Reply hazy try again\", \"Ask again later\",", "config self.buildHandle() def build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update):", "\"Very doubtful\" ] def __init__(self, pearl, config): self.pearl = pearl self.hangouts = self.pearl.hangouts", "not tell you now\", \"Cannot predict now\", \"Concentrate and ask again\", \"Don't count", "import nacre class EightBallSession: answers = [ \"It is certain\", \"It is decidedly", "and ask again\", \"Don't count on it\", \"My reply is no\", \"My sources", "self.pearl = pearl self.hangouts = self.pearl.hangouts self.config = config self.buildHandle() def build(self): pass", "asyncio, random import nacre class EightBallSession: answers = [ \"It is certain\", \"It", "is decidedly so\", \"Without a doubt\", \"Yes definitely\", \"You may rely on it\",", "= update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, event): message =", "nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await self.respond(event)", "again\", \"Ask again later\", \"Better not tell you now\", \"Cannot predict now\", \"Concentrate", "] def __init__(self, pearl, config): self.pearl = 
pearl self.hangouts = self.pearl.hangouts self.config =", "\"Without a doubt\", \"Yes definitely\", \"You may rely on it\", \"As I see", "= self.pearl.hangouts self.config = config self.buildHandle() def build(self): pass def buildHandle(self): messageFilter =", "class EightBallSession: answers = [ \"It is certain\", \"It is decidedly so\", \"Without", "message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation) def load(pearl, config): return", "async def respond(self, event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation)", "count on it\", \"My reply is no\", \"My sources say no\", \"Outlook not", "is certain\", \"It is decidedly so\", \"Without a doubt\", \"Yes definitely\", \"You may", "\"Reply hazy try again\", \"Ask again later\", \"Better not tell you now\", \"Cannot", "\"You may rely on it\", \"As I see it, yes\", \"Most likely\", \"Outlook", "good\", \"Very doubtful\" ] def __init__(self, pearl, config): self.pearl = pearl self.hangouts =", "random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation) def load(pearl, config): return EightBallSession(pearl, config)", "\"Most likely\", \"Outlook good\", \"Yes\", \"Signs point to yes\", \"Reply hazy try again\",", "sources say no\", \"Outlook not so good\", \"Very doubtful\" ] def __init__(self, pearl,", "pearl self.hangouts = self.pearl.hangouts self.config = config self.buildHandle() def build(self): pass def buildHandle(self):", "config): self.pearl = pearl self.hangouts = self.pearl.hangouts self.config = config self.buildHandle() def build(self):", "\"Yes definitely\", \"You may rely on it\", \"As I see it, yes\", \"Most", "now\", \"Cannot predict now\", \"Concentrate and ask again\", \"Don't count on it\", \"My", "\"Ask again later\", \"Better not tell 
you now\", \"Cannot predict now\", \"Concentrate and", "\"It is decidedly so\", \"Without a doubt\", \"Yes definitely\", \"You may rely on", "def respond(self, event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation) def", "if nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self,", "EightBallSession: answers = [ \"It is certain\", \"It is decidedly so\", \"Without a", "I see it, yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs point to yes\",", "\"Concentrate and ask again\", \"Don't count on it\", \"My reply is no\", \"My", "it\", \"My reply is no\", \"My sources say no\", \"Outlook not so good\",", "point to yes\", \"Reply hazy try again\", \"Ask again later\", \"Better not tell", "hazy try again\", \"Ask again later\", \"Better not tell you now\", \"Cannot predict", "\"Outlook good\", \"Yes\", \"Signs point to yes\", \"Reply hazy try again\", \"Ask again", "def __init__(self, pearl, config): self.pearl = pearl self.hangouts = self.pearl.hangouts self.config = config", "likely\", \"Outlook good\", \"Yes\", \"Signs point to yes\", \"Reply hazy try again\", \"Ask", "self.pearl.updateEvent.addListener(handle) async def respond(self, event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message,", "self.pearl.hangouts self.config = config self.buildHandle() def build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format']))", "pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event =", "definitely\", \"You may rely on 
it\", \"As I see it, yes\", \"Most likely\",", "on it\", \"My reply is no\", \"My sources say no\", \"Outlook not so", "to yes\", \"Reply hazy try again\", \"Ask again later\", \"Better not tell you", "decidedly so\", \"Without a doubt\", \"Yes definitely\", \"You may rely on it\", \"As", "predict now\", \"Concentrate and ask again\", \"Don't count on it\", \"My reply is", "say no\", \"Outlook not so good\", \"Very doubtful\" ] def __init__(self, pearl, config):", "= config self.buildHandle() def build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def", "no\", \"Outlook not so good\", \"Very doubtful\" ] def __init__(self, pearl, config): self.pearl", "= nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async def handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await", "it\", \"As I see it, yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs point", "not so good\", \"Very doubtful\" ] def __init__(self, pearl, config): self.pearl = pearl", "update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async def respond(self, event): message = random.choice(self.answers)", "later\", \"Better not tell you now\", \"Cannot predict now\", \"Concentrate and ask again\",", "it, yes\", \"Most likely\", \"Outlook good\", \"Yes\", \"Signs point to yes\", \"Reply hazy", "event): message = random.choice(self.answers) conversation = self.hangouts.getConversation(event=event) await self.hangouts.send(message, conversation) def load(pearl, config):", "__init__(self, pearl, config): self.pearl = pearl self.hangouts = self.pearl.hangouts self.config = config self.buildHandle()", "so\", \"Without a doubt\", \"Yes definitely\", \"You may rely on it\", \"As I", "async def handle(update): if 
nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle)", "def handle(update): if nacre.handle.isMessageEvent(update): event = update.event_notification.event if messageFilter(event): await self.respond(event) self.pearl.updateEvent.addListener(handle) async", "\"Signs point to yes\", \"Reply hazy try again\", \"Ask again later\", \"Better not", "self.config = config self.buildHandle() def build(self): pass def buildHandle(self): messageFilter = nacre.handle.newMessageFilter('^{}\\s+8ball(\\s.*)?$'.format(self.pearl.config['format'])) async", "\"Cannot predict now\", \"Concentrate and ask again\", \"Don't count on it\", \"My reply", "\"Don't count on it\", \"My reply is no\", \"My sources say no\", \"Outlook" ]
[ "'here'. if not invoker.location: # This shouldn't ever happen, but... raise CommandError('You appear", "<filename>src/game/commands/general.py \"\"\" General commands that are available to everyone. \"\"\" import json import", "string list of commands in the table. \"\"\" buf = '' for cmd", "that are available to everyone. \"\"\" import json import settings from src.daemons.server.commands.command import", "class CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\" name = 'examine' aliases = ['ex',", "defaulting to 'here'. if not invoker.location: # This shouldn't ever happen, but... raise", "+= json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand):", "not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse:", "the game. \"\"\" name = 'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\")", "self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table", "import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object.", "the dott version identifier. Currently a git commit hash. \"\"\" name = 'version'", "'.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring objects to see. speech_str = u\"%s", "obj.aliases: attributes_str += ' Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str +=", "invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise", "puts us. 
leave_to = location.determine_leave_destination(invoker) # Use the original object's name for the", "sees. self_str = u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand):", "Currently a git commit hash. \"\"\" name = 'version' #noinspection PyUnusedLocal def func(self,", "nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people in the same room as", "invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf +=", "or admin. :rtype: str :returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand):", "\"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list. \"\"\" name", "= obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse", "if nplayers == 1: retval += 'One player logged in.' else: retval +=", "like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter an object. \"\"\"", "of commands in the table. \"\"\" buf = '' for cmd in table.commands:", ":rtype: str :returns: The object's appearance, from the invoker's perspective. \"\"\" if invoker.is_admin():", "buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from always getting the object's", "78 buf += \"\\n %s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION ) buf", "WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\" name = 'examine' aliases", "attributes_str = ' Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str +=", "the invoker is a normal player, this will simply return the normal description.", "invoker is an admin. 
If so, admins get a very nerdy examine display", "\"\"\" Checks to see whether the invoker is an admin. If so, admins", "attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit. \"\"\" name = 'go'", "from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\" name", "the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class", "Given a CommandTable instance, return a string that lists the commands in the", "retval += \" %s\\n\" % account nplayers = len(accounts) if nplayers == 1:", "for other neighboring objects to see. speech_str = u\"%s says '%s'\" % (invoker.name,", "invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That", "raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look around,", "appearance, despite whether the invoker is a player or admin. :rtype: str :returns:", "Presentational arrangement for other neighboring objects to see. speech_str = u\"%s says '%s'\"", "is a player or admin. :rtype: str :returns: The object's appearance. \"\"\" return", "object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier.", "invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine where", "'' for cmd in table.commands: buf += ' %s' % cmd.name return buf", "the normal description. 
:rtype: str :returns: The object's appearance, from the invoker's perspective.", "with examine, aside from always getting the object's normal appearance, regardless of whether", "obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter an object. \"\"\" name = 'enter'", "from the game. \"\"\" name = 'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd):", ":param PlayerObject invoker: The player who ran the command. \"\"\" accounts = results['accounts']", "leave_to = location.determine_leave_destination(invoker) # Use the original object's name for the user message.", "return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list. \"\"\" name =", "% obj.description if obj.internal_description: attributes_str += ' Internal Description: %s\\n' % obj.internal_description if", "not obj_match: raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def", "deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\" Once the proxy gets back to", "on who is connected, so the mud server has to ask. This is", "obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name", "nplayers == 1: retval += 'One player logged in.' else: retval += '%d", "CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\" name = 'quit' #noinspection PyUnusedLocal def", "CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier. Currently a git commit hash. \"\"\"", "name = 'say' def func(self, invoker, parsed_cmd): # The sentence to speak. speech", "location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down", "currently # controlling this object. 
service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows", "obj.location: attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += '", "to be nowhere. Bummer.') user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments) if", "invoker is a normal player, this will simply return the normal description. :rtype:", "def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string)", "is an admin or not. \"\"\" name = 'look' aliases = ['l'] def", "(if applicable), and admin status. \"\"\" name = 'commands' #noinspection PyUnusedLocal def func(self,", "invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows", "\"\"\" name = 'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter", "us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for the user", "+= ' Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str += ' Location:", "Attempts to leave an object. \"\"\" name = 'leave' #noinspection PyUnusedLocal def func(self,", "+= ' %s' % cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine,", "= 'here' else: user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must", "def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse", "connected, this callback triggers. :param dict results: The details returned by the proxy.", ":param dict results: The details returned by the proxy. 
:param PlayerObject invoker: The", "%s\\n' % obj.description if obj.internal_description: attributes_str += ' Internal Description: %s\\n' % obj.internal_description", "invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\" name =", "very nerdy examine display that shows an object's un-parsed name/description, and attributes. If", "' %s' % cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside", "means defaulting to 'here'. if not invoker.location: # This shouldn't ever happen, but...", "version identifier. Currently a git commit hash. \"\"\" name = 'version' #noinspection PyUnusedLocal", "name = 'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through", "CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from always getting the object's normal appearance,", "%s' % cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from", "self_str = u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\"", "%s\\n' % obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes,", "enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave an object.", "list. :rtype: str :returns: A string list of commands in the table. \"\"\"", "the invoker sees. self_str = u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str)", "%s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\\n' %", "# Determine where leaving the object puts us. 
leave_to = location.determine_leave_destination(invoker) # Use", "the proxy gets back to us on who is connected, this callback triggers.", "buf = '' if invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf += self._buffer_command_table(", "object's name for the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" %", "location.local_command_table: buf += '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self,", "git commit hash. \"\"\" name = 'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd):", "examined. \"\"\" attributes_str = ' Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases:", "def _buffer_command_table(self, table): \"\"\" Given a CommandTable instance, return a string that lists", "arguments means defaulting to 'here'. if not invoker.location: # This shouldn't ever happen,", "def func(self, invoker, parsed_cmd): buf = \"-\" * 78 buf += \"\\n %s", "room as you. \"\"\" name = 'say' def func(self, invoker, parsed_cmd): # The", "normal appearance, despite whether the invoker is a player or admin. :rtype: str", "player or admin. :rtype: str :returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class", "and a callback. \"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def", "buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a CommandTable", "your location's command table (if applicable), and admin status. 
\"\"\" name = 'commands'", "service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\" Once the proxy gets back", "an object to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching", "= invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look around, but can't find it.\")", "if obj.internal_description: attributes_str += ' Internal Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str", "%s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker)", "doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter an", "are currently # controlling this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\"", "PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer to send to", "if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table", "the object's normal appearance, regardless of whether the player is an admin or", "deferred and a callback. \"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker)", "in.' else: retval += '%d players logged in.' 
% nplayers invoker.emit_to(retval) class CmdSay(BaseCommand):", "invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a CommandTable instance, return a string that", "CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\")", "enter an object. \"\"\" name = 'enter' def func(self, invoker, parsed_cmd): if not", "service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\"", "parsed_cmd.arguments: # No arguments means defaulting to 'here'. if not invoker.location: # This", "= 'look' aliases = ['l'] def get_appearance(self, obj_match, invoker): \"\"\" The 'look' command", "the invoker's perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def", "user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand):", "CmdSay(BaseCommand): \"\"\" Communicate with people in the same room as you. \"\"\" name", "CommandError('You must specify an object to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match:", "= invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match,", "user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify an object", "Attempts to traverse an exit. 
\"\"\" name = 'go' def func(self, invoker, parsed_cmd):", "buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location: if invoker.is_admin() and", "'\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal", "invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string)", "speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\" name", "%s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION ) buf += \"-\" * 78", "+= ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' % obj.description", "'\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf", "which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not", "not can_enter: raise CommandError(cant_enter_msg) # Determine where entering the object puts us. enter_to", "an exit. \"\"\" name = 'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments:", "location.determine_leave_destination(invoker) # Use the original object's name for the user message. 
leave_from_name =", ") invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a CommandTable instance, return a string", "func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if", "+= \" %s\\n\" % account nplayers = len(accounts) if nplayers == 1: retval", "obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' % obj.description if obj.internal_description: attributes_str += '", "class CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier. Currently a git commit hash.", "of available commands. Takes into account your location's command table (if applicable), and", "instance, return a string that lists the commands in the table. :param CommandTable", "that are currently # controlling this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand):", "def func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer to send to user.", "invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not", "admins get a very nerdy examine display that shows an object's un-parsed name/description,", "if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name =", "table. :param CommandTable table: The command table whose commands to list. :rtype: str", "obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type ==", "for the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to)", "available to everyone. 
\"\"\" import json import settings from src.daemons.server.commands.command import BaseCommand from", "CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit. \"\"\" name = 'go' def func(self,", ":returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic", "perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj,", "player who ran the command. \"\"\" accounts = results['accounts'] retval = \"Player\\n\" for", "if not user_query: raise CommandError('You must specify an object to examine.') obj_match =", "account in accounts: retval += \" %s\\n\" % account nplayers = len(accounts) if", "obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like", "def get_appearance(self, obj_match, invoker): \"\"\" The 'look' command always shows an object's normal", "invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look around, but can't find it.\") can_enter,", "invoker) def _wholist_callback(self, results, invoker): \"\"\" Once the proxy gets back to us", "= location.determine_leave_destination(invoker) # Use the original object's name for the user message. leave_from_name", "leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down of available commands. Takes into", "attributes_str += ' Internal Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str += '\\n###", "obj_match, invoker): \"\"\" The 'look' command always shows an object's normal appearance, despite", "name for the user message. 
enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name)", "always shows an object's normal appearance, despite whether the invoker is a player", "obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location:", "The object's appearance, from the invoker's perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker)", "% obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3)", "Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal Commands:'", "retval += 'One player logged in.' else: retval += '%d players logged in.'", "General commands that are available to everyone. \"\"\" import json import settings from", "WHO list. \"\"\" name = 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\"", "'One player logged in.' else: retval += '%d players logged in.' % nplayers", "sentence to speak. speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring", "parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter:", "commit hash. \"\"\" name = 'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf", "attributes_str += ' Description: %s\\n' % obj.description if obj.internal_description: attributes_str += ' Internal", "object's un-parsed name/description, and attributes. 
If the invoker is a normal player, this", "account nplayers = len(accounts) if nplayers == 1: retval += 'One player logged", "import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand):", "sessions that are currently # controlling this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class", "' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\\n' %", "not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You", "players logged in.' % nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people in", "invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\" name = 'quit' #noinspection", "CmdLeave(BaseCommand): \"\"\" Attempts to leave an object. \"\"\" name = 'leave' #noinspection PyUnusedLocal", "admin. :rtype: str :returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\"", "if obj.location: attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str +=", "admin. If so, admins get a very nerdy examine display that shows an", "['ex', 'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments means", "(%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\\n' % ',", "\"\"\" name = 'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go", "Checks to see whether the invoker is an admin. If so, admins get", "# Use the original object's name for the user message. 
leave_from_name = location.get_appearance_name(invoker)", "find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine", "who is connected, so the mud server has to ask. This is handled", "name = 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service #", "else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows the object as it", "this callback triggers. :param dict results: The details returned by the proxy. :param", "object to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object", "the same room as you. \"\"\" name = 'say' def func(self, invoker, parsed_cmd):", "buf += '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table):", "location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a CommandTable instance, return a", "# This shouldn't ever happen, but... raise CommandError('You appear to be nowhere. Bummer.')", "%s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down of available commands.", "#noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The proxy has all of the", "else: user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify an", "parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise", "ever happen, but... raise CommandError('You appear to be nowhere. Bummer.') user_query = 'here'", "description. :rtype: str :returns: The object's appearance, from the invoker's perspective. 
\"\"\" if", "Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal Commands:' buf", "Once the proxy gets back to us on who is connected, this callback", "the player is an admin or not. \"\"\" name = 'look' aliases =", "', '.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone:", "entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's", "If the invoker is a normal player, this will simply return the normal", "lists the commands in the table. :param CommandTable table: The command table whose", "invoker): \"\"\" The 'look' command always shows an object's normal appearance, despite whether", "= 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer", "to disconnect any sessions that are currently # controlling this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd,", "triggers. :param dict results: The details returned by the proxy. :param PlayerObject invoker:", "a callback. \"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self,", "'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments means defaulting", "status. \"\"\" name = 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service =", "into account your location's command table (if applicable), and admin status. \"\"\" name", "what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look around, but can't", "retval += '%d players logged in.' 
% nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate", "shows an object's un-parsed name/description, and attributes. If the invoker is a normal", "normal player, this will simply return the normal description. :rtype: str :returns: The", "' Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\\n'", "\"\"\" Synonymous with examine, aside from always getting the object's normal appearance, regardless", "other neighboring objects to see. speech_str = u\"%s says '%s'\" % (invoker.name, speech)", "import json import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from", "the command. \"\"\" accounts = results['accounts'] retval = \"Player\\n\" for account in accounts:", "invoker.location: # This shouldn't ever happen, but... raise CommandError('You appear to be nowhere.", "src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class", "\"\"\" attributes_str = ' Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str", "func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks the proxy to", "object. \"\"\" name = 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location =", "BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\"", "un-parsed name/description, and attributes. 
If the invoker is a normal player, this will", "name = 'examine' aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd): if not", "' Internal Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n'", "str :returns: The object's appearance, from the invoker's perspective. \"\"\" if invoker.is_admin(): return", "command table (if applicable), and admin status. \"\"\" name = 'commands' #noinspection PyUnusedLocal", "a player or admin. :rtype: str :returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker)", "invoker's perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self,", "user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You", "but can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg)", "%s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str", "attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone:", "attributes_str += ' Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str += '", ":rtype: str :returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A", "proxy to disconnect any sessions that are currently # controlling this object. service.proxyamp.callRemote(", "u\"%s says '%s'\" % (invoker.name, speech) # What the invoker sees. self_str =", "\"\"\" General commands that are available to everyone. 
\"\"\" import json import settings", "= \"-\" * 78 buf += \"\\n %s version %s\\n\" % ( settings.GAME_NAME,", "+= \"\\n %s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION ) buf += \"-\"", "appearance, from the invoker's perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return", "obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\"", "people in the same room as you. \"\"\" name = 'say' def func(self,", "were examined. \"\"\" attributes_str = ' Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if", "def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks the proxy", "% (invoker.name, speech) # What the invoker sees. self_str = u\"You say '%s'\"", "from always getting the object's normal appearance, regardless of whether the player is", "No arguments means defaulting to 'here'. if not invoker.location: # This shouldn't ever", "+= '%d players logged in.' % nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with", "self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks to see whether the", "invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down of available commands. Takes into account", "'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter =", "buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table", "the object as it were examined. \"\"\" attributes_str = ' Parent: %s (%s)\\n'", "name/description, and attributes. If the invoker is a normal player, this will simply", "from the invoker's perspective. 
\"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker)", "can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) #", "table (if applicable), and admin status. \"\"\" name = 'commands' #noinspection PyUnusedLocal def", "not parsed_cmd.arguments: # No arguments means defaulting to 'here'. if not invoker.location: #", "= location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a", "invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave an", "name = 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location can_leave,", "command table whose commands to list. :rtype: str :returns: A string list of", "What the invoker sees. self_str = u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker])", "around, but can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise", "can_enter: raise CommandError(cant_enter_msg) # Determine where entering the object puts us. enter_to =", "get_appearance(self, obj_match, invoker): \"\"\" The 'look' command always shows an object's normal appearance,", "obj_match: raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self,", "Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a", "object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier. Currently a git", "not. 
\"\"\" name = 'look' aliases = ['l'] def get_appearance(self, obj_match, invoker): \"\"\"", "\"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit. \"\"\"", "buf = \"-\" * 78 buf += \"\\n %s version %s\\n\" % (", "original object's name for the user message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\"", "= 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg", "% account nplayers = len(accounts) if nplayers == 1: retval += 'One player", "regardless of whether the player is an admin or not. \"\"\" name =", "not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the object puts us. leave_to", "simply return the normal description. :rtype: str :returns: The object's appearance, from the", "= obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine where entering the object", "= u\"%s says '%s'\" % (invoker.name, speech) # What the invoker sees. self_str", "traverse an exit. \"\"\" name = 'go' def func(self, invoker, parsed_cmd): if not", "admin status. \"\"\" name = 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service", "#noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer to send", "from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines", "['l'] def get_appearance(self, obj_match, invoker): \"\"\" The 'look' command always shows an object's", "an admin. 
If so, admins get a very nerdy examine display that shows", "if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\"", "\"\"\" Communicate with people in the same room as you. \"\"\" name =", "so, admins get a very nerdy examine display that shows an object's un-parsed", "exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type", "Buffer to send to user. buf = '' if invoker.is_admin(): buf += '\\nGlobal", "commands to list. :rtype: str :returns: A string list of commands in the", "\"\"\" name = 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location", "whether the invoker is an admin. If so, admins get a very nerdy", "look around, but can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter:", "= 'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service #", "The 'look' command always shows an object's normal appearance, despite whether the invoker", "account your location's command table (if applicable), and admin status. \"\"\" name =", "ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name,", "%s\\n\" % account nplayers = len(accounts) if nplayers == 1: retval += 'One", "can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine where entering", "parsed_cmd): # The sentence to speak. 
speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement", "enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts", "A REALLY basic WHO list. \"\"\" name = 'who' #noinspection PyUnusedLocal def func(self,", "CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look around, but", "is connected, this callback triggers. :param dict results: The details returned by the", "controlling this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the dott", "all of the details on who is connected, so the mud server has", "raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like an", "'\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given", "shows an object's normal appearance, despite whether the invoker is a player or", "+= ' Description: %s\\n' % obj.description if obj.internal_description: attributes_str += ' Internal Description:", "+= 'One player logged in.' else: retval += '%d players logged in.' %", "not user_query: raise CommandError('You must specify an object to examine.') obj_match = invoker.contextual_object_search(user_query)", "Bummer.') user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise", "player is an admin or not. \"\"\" name = 'look' aliases = ['l']", "If so, admins get a very nerdy examine display that shows an object's", "obj, invoker): \"\"\" Checks to see whether the invoker is an admin. 
If", "parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if", "return a string that lists the commands in the table. :param CommandTable table:", "invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people in the same room as you.", "Shows the dott version identifier. Currently a git commit hash. \"\"\" name =", "the invoker is an admin. If so, admins get a very nerdy examine", "Attempts to enter an object. \"\"\" name = 'enter' def func(self, invoker, parsed_cmd):", "self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location", "obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse an", "= 'say' def func(self, invoker, parsed_cmd): # The sentence to speak. speech =", "CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list. \"\"\" name = 'who' #noinspection PyUnusedLocal", "Synonymous with examine, aside from always getting the object's normal appearance, regardless of", "cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from always getting", "from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd", "commands that are available to everyone. 
\"\"\" import json import settings from src.daemons.server.commands.command", "= results['accounts'] retval = \"Player\\n\" for account in accounts: retval += \" %s\\n\"", "results, invoker): \"\"\" Once the proxy gets back to us on who is", "= invoker.mud_service # This asks the proxy to disconnect any sessions that are", "+= self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location: if invoker.is_admin() and location.local_admin_command_table:", "% ', '.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if", "version %s\\n\" % ( settings.GAME_NAME, settings.VERSION ) buf += \"-\" * 78 invoker.emit_to(buffer)", "examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object found.') appearance", "arrangement for other neighboring objects to see. speech_str = u\"%s says '%s'\" %", "player logged in.' else: retval += '%d players logged in.' % nplayers invoker.emit_to(retval)", "neighboring objects to see. speech_str = u\"%s says '%s'\" % (invoker.name, speech) #", "func(self, invoker, parsed_cmd): # The sentence to speak. speech = u' '.join(parsed_cmd.arguments) #", ") location = invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal", "raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj,", "invoker.mud_service # Buffer to send to user. buf = '' if invoker.is_admin(): buf", "'look' aliases = ['l'] def get_appearance(self, obj_match, invoker): \"\"\" The 'look' command always", "% enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave an object. \"\"\" name", "1: retval += 'One player logged in.' else: retval += '%d players logged", "is an admin. 
If so, admins get a very nerdy examine display that", "in the same room as you. \"\"\" name = 'say' def func(self, invoker,", "parsed_cmd): if not parsed_cmd.arguments: # No arguments means defaulting to 'here'. if not", "parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg)", "available commands. Takes into account your location's command table (if applicable), and admin", "str :returns: The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY", "object's normal appearance, regardless of whether the player is an admin or not.", "location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table )", "be nowhere. Bummer.') user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments) if not", "invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments means defaulting to 'here'. if", "matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\"", "Takes into account your location's command table (if applicable), and admin status. \"\"\"", "buf += \"\\n %s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION ) buf +=", "an object. \"\"\" name = 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location", "'%s'\" % (invoker.name, speech) # What the invoker sees. self_str = u\"You say", "in the table. :param CommandTable table: The command table whose commands to list.", "normal appearance, regardless of whether the player is an admin or not. 
\"\"\"", "Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str +=", "'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg =", "shouldn't ever happen, but... raise CommandError('You appear to be nowhere. Bummer.') user_query =", "proxy has all of the details on who is connected, so the mud", "of the details on who is connected, so the mud server has to", "the proxy to disconnect any sessions that are currently # controlling this object.", "indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts", "a break-down of available commands. Takes into account your location's command table (if", "object puts us. leave_to = location.determine_leave_destination(invoker) # Use the original object's name for", "so the mud server has to ask. This is handled through a deferred", "speech_str = u\"%s says '%s'\" % (invoker.name, speech) # What the invoker sees.", "len(accounts) if nplayers == 1: retval += 'One player logged in.' else: retval", "cmd in table.commands: buf += ' %s' % cmd.name return buf class CmdLook(CmdExamine):", "service = invoker.mud_service # This asks the proxy to disconnect any sessions that", "mud server has to ask. This is handled through a deferred and a", "\"\"\" name = 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The proxy", "location = invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin", "the user message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class", "Use the original object's name for the user message. 
leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You", "\"\\n %s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION ) buf += \"-\" *", "enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for the user message.", "it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine where", "puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for the", "#noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks", "PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks the", "class CmdLeave(BaseCommand): \"\"\" Attempts to leave an object. \"\"\" name = 'leave' #noinspection", "Lists a break-down of available commands. Takes into account your location's command table", "CommandTable instance, return a string that lists the commands in the table. :param", "= 'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter", "not obj_to_enter: raise CommandError(\"You look around, but can't find it.\") can_enter, cant_enter_msg =", "parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks the proxy to disconnect any", "def func(self, invoker, parsed_cmd): \"\"\" The proxy has all of the details on", ") buf += '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location", "\"\"\" buf = '' for cmd in table.commands: buf += ' %s' %", "src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object. 
\"\"\" name =", "This asks the proxy to disconnect any sessions that are currently # controlling", "\"\"\" Once the proxy gets back to us on who is connected, this", "this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the dott version", ") class CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier. Currently a git commit", "asks the proxy to disconnect any sessions that are currently # controlling this", "= '' if invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table", "= u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects", "% obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' % obj.description if obj.internal_description: attributes_str +=", "\"\"\" Attempts to traverse an exit. \"\"\" name = 'go' def func(self, invoker,", "location's command table (if applicable), and admin status. \"\"\" name = 'commands' #noinspection", "speech) # What the invoker sees. self_str = u\"You say '%s'\" % speech", "a string that lists the commands in the table. :param CommandTable table: The", "\"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker):", "CommandTable table: The command table whose commands to list. :rtype: str :returns: A", "str :returns: A string list of commands in the table. \"\"\" buf =", "dict results: The details returned by the proxy. :param PlayerObject invoker: The player", "leave an object. 
\"\"\" name = 'leave' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd):", "'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which exit?')", "# Buffer to send to user. buf = '' if invoker.is_admin(): buf +=", "returned by the proxy. :param PlayerObject invoker: The player who ran the command.", "= invoker.mud_service # Buffer to send to user. buf = '' if invoker.is_admin():", "invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker)", "import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import", "not invoker.location: # This shouldn't ever happen, but... raise CommandError('You appear to be", "# This asks the proxy to disconnect any sessions that are currently #", "appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks to see", "'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf = \"-\" * 78 buf", "func(self, invoker, parsed_cmd): \"\"\" The proxy has all of the details on who", "% (obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\\n' % ', '.join(obj.aliases)", "= 'examine' aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments:", "CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\" name = 'examine' aliases = ['ex', 'exa']", "exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter an object. \"\"\" name =", "table: The command table whose commands to list. :rtype: str :returns: A string", "getting the object's normal appearance, regardless of whether the player is an admin", "object puts us. 
enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for", "obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine where entering the object puts", "= obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for the user message. enter_to_name", "Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker)", "invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter", "obj_to_enter.determine_enter_destination(invoker) # Use the original object's name for the user message. enter_to_name =", "self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf", "everyone. \"\"\" import json import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import", "+= self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a CommandTable instance,", "can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the object puts us. leave_to =", "name = 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The proxy has", "= 'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf = \"-\" * 78", "that lists the commands in the table. :param CommandTable table: The command table", "obj, invoker): \"\"\" Shows the object as it were examined. 
\"\"\" attributes_str =", "' Description: %s\\n' % obj.description if obj.internal_description: attributes_str += ' Internal Description: %s\\n'", "+= '\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:'", "object. \"\"\" name = 'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise", "The command table whose commands to list. :rtype: str :returns: A string list", "list of commands in the table. \"\"\" buf = '' for cmd in", "' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' % obj.description if", "to see whether the invoker is an admin. If so, admins get a", "to ask. This is handled through a deferred and a callback. \"\"\" service", "func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer to send to user. buf", "# The sentence to speak. speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement for", "if obj.aliases: attributes_str += ' Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str", "\"\"\" The proxy has all of the details on who is connected, so", "class CmdSay(BaseCommand): \"\"\" Communicate with people in the same room as you. \"\"\"", "aside from always getting the object's normal appearance, regardless of whether the player", "= ['ex', 'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments", "original object's name for the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\"", "found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks to", "it were examined. 
\"\"\" attributes_str = ' Parent: %s (%s)\\n' % (obj.parent, obj.base_type)", "return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows the object as it were", "back to us on who is connected, this callback triggers. :param dict results:", "accounts = results['accounts'] retval = \"Player\\n\" for account in accounts: retval += \"", "buf = '' for cmd in table.commands: buf += ' %s' % cmd.name", "Examines an object. \"\"\" name = 'examine' aliases = ['ex', 'exa'] def func(self,", "Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location: if invoker.is_admin()", "'.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify an object to examine.') obj_match", "invoker, parsed_cmd): service = invoker.mud_service # Buffer to send to user. buf =", "name = 'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf = \"-\" *", "the proxy. :param PlayerObject invoker: The player who ran the command. \"\"\" accounts", "+= '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location:", "look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter an object.", "by the proxy. :param PlayerObject invoker: The player who ran the command. \"\"\"", "player, this will simply return the normal description. 
:rtype: str :returns: The object's", "parsed_cmd): \"\"\" The proxy has all of the details on who is connected,", "#noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker)", "= invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\" Once", "raise CommandError(cant_leave_msg) # Determine where leaving the object puts us. leave_to = location.determine_leave_destination(invoker)", "def get_examine_appearance(self, obj, invoker): \"\"\" Shows the object as it were examined. \"\"\"", "== 'exit': invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts", "%s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave an object. \"\"\"", "command always shows an object's normal appearance, despite whether the invoker is a", "REALLY basic WHO list. \"\"\" name = 'who' #noinspection PyUnusedLocal def func(self, invoker,", "CommandError(cant_enter_msg) # Determine where entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) #", "to leave an object. \"\"\" name = 'leave' #noinspection PyUnusedLocal def func(self, invoker,", "service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location =", "= 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The proxy has all", "invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down of", "where entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original", "\" %s\\n\" % account nplayers = len(accounts) if nplayers == 1: retval +=", "in.' 
% nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people in the same", "'%d players logged in.' % nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people", "'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service # Buffer to", "invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\" Once the", "(invoker.name, speech) # What the invoker sees. self_str = u\"You say '%s'\" %", "% cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from always", "admin or not. \"\"\" name = 'look' aliases = ['l'] def get_appearance(self, obj_match,", "for account in accounts: retval += \" %s\\n\" % account nplayers = len(accounts)", "message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\"", "if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class", ":returns: The object's appearance, from the invoker's perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj,", "in the table. \"\"\" buf = '' for cmd in table.commands: buf +=", "settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd,", "us. leave_to = location.determine_leave_destination(invoker) # Use the original object's name for the user", "is handled through a deferred and a callback. \"\"\" service = invoker.mud_service deferred", "The details returned by the proxy. :param PlayerObject invoker: The player who ran", "callback triggers. 
:param dict results: The details returned by the proxy. :param PlayerObject", "class CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\" name = 'quit' #noinspection PyUnusedLocal", "'exit': invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to", "attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return", "raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination", "leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down of available", "# Use the original object's name for the user message. enter_to_name = obj_to_enter.get_appearance_name(invoker)", "or not. \"\"\" name = 'look' aliases = ['l'] def get_appearance(self, obj_match, invoker):", "\"\"\" name = 'examine' aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd): if", "Internal Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str", "raise CommandError('You appear to be nowhere. Bummer.') user_query = 'here' else: user_query =", "'' if invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table )", "appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list. \"\"\"", "invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows the object as", "The proxy has all of the details on who is connected, so the", "message. 
leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\"", "gets back to us on who is connected, this callback triggers. :param dict", "name = 'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service", "= location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the object", "object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks", "get_examine_appearance(self, obj, invoker): \"\"\" Shows the object as it were examined. \"\"\" attributes_str", "= self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks to see whether", "not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand):", "Communicate with people in the same room as you. \"\"\" name = 'say'", "\"\"\" Attempts to enter an object. \"\"\" name = 'enter' def func(self, invoker,", "#noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf = \"-\" * 78 buf +=", "leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists", "speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring objects to see.", "\"\"\" Attempts to leave an object. \"\"\" name = 'leave' #noinspection PyUnusedLocal def", "an object's normal appearance, despite whether the invoker is a player or admin.", "get a very nerdy examine display that shows an object's un-parsed name/description, and", "class CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit. 
\"\"\" name = 'go' def", "+= self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table )", "whose commands to list. :rtype: str :returns: A string list of commands in", "'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This", "PyUnusedLocal def func(self, invoker, parsed_cmd): buf = \"-\" * 78 buf += \"\\n", "invoker, parsed_cmd): buf = \"-\" * 78 buf += \"\\n %s version %s\\n\"", "to send to user. buf = '' if invoker.is_admin(): buf += '\\nGlobal Admin", "A string list of commands in the table. \"\"\" buf = '' for", "see whether the invoker is an admin. If so, admins get a very", "same room as you. \"\"\" name = 'say' def func(self, invoker, parsed_cmd): #", "class CmdEnter(BaseCommand): \"\"\" Attempts to enter an object. \"\"\" name = 'enter' def", "the table. :param CommandTable table: The command table whose commands to list. :rtype:", "get_appearance(self, obj, invoker): \"\"\" Checks to see whether the invoker is an admin.", "\"\"\" accounts = results['accounts'] retval = \"Player\\n\" for account in accounts: retval +=", "def func(self, invoker, parsed_cmd): # The sentence to speak. speech = u' '.join(parsed_cmd.arguments)", "nerdy examine display that shows an object's un-parsed name/description, and attributes. 
If the", "json import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp", "\"\"\" name = 'look' aliases = ['l'] def get_appearance(self, obj_match, invoker): \"\"\" The", "cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not can_enter: raise CommandError(cant_enter_msg) # Determine where entering the", "def get_appearance(self, obj, invoker): \"\"\" Checks to see whether the invoker is an", "specify an object to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No", "# Presentational arrangement for other neighboring objects to see. speech_str = u\"%s says", "func(self, invoker, parsed_cmd): buf = \"-\" * 78 buf += \"\\n %s version", "list. \"\"\" name = 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The", "commands in the table. \"\"\" buf = '' for cmd in table.commands: buf", "if not obj_to_enter: raise CommandError(\"You look around, but can't find it.\") can_enter, cant_enter_msg", "Determine where entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the", "nowhere. Bummer.') user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments) if not user_query:", "src.daemons.server.commands.exceptions import CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an", "if location: if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf +=", "Description: %s\\n' % obj.description if obj.internal_description: attributes_str += ' Internal Description: %s\\n' %", "hash. \"\"\" name = 'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf =", "speak. 
speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring objects to", "class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from always getting the object's normal", "+= self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal Commands:' buf += self._buffer_command_table(", "def func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not", "\"\"\" name = 'say' def func(self, invoker, parsed_cmd): # The sentence to speak.", "+= '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\"", "who is connected, this callback triggers. :param dict results: The details returned by", "CommandError('You appear to be nowhere. Bummer.') user_query = 'here' else: user_query = '", "not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look", "game. \"\"\" name = 'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service", "PyUnusedLocal def func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if", "== 1: retval += 'One player logged in.' else: retval += '%d players", "def _wholist_callback(self, results, invoker): \"\"\" Once the proxy gets back to us on", "logged in.' % nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people in the", "and admin status. \"\"\" name = 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd):", "else: retval += '%d players logged in.' % nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\"", "disconnect any sessions that are currently # controlling this object. 
service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id,", "an exit.\") obj_to_traverse.pass_object_through(invoker) class CmdEnter(BaseCommand): \"\"\" Attempts to enter an object. \"\"\" name", "has to ask. This is handled through a deferred and a callback. \"\"\"", "if not invoker.location: # This shouldn't ever happen, but... raise CommandError('You appear to", "results['accounts'] retval = \"Player\\n\" for account in accounts: retval += \" %s\\n\" %", "+= '\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf +=", "'look' command always shows an object's normal appearance, despite whether the invoker is", "\"\"\" Disconnects from the game. \"\"\" name = 'quit' #noinspection PyUnusedLocal def func(self,", "location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table:", "commands. Takes into account your location's command table (if applicable), and admin status.", "(obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\\n' % ', '.join(obj.aliases) if", "will simply return the normal description. :rtype: str :returns: The object's appearance, from", "name = 'look' aliases = ['l'] def get_appearance(self, obj_match, invoker): \"\"\" The 'look'", "attributes. If the invoker is a normal player, this will simply return the", "invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks to see whether the invoker is", "\"\"\" name = 'commands' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): service = invoker.mud_service", "an admin or not. 
\"\"\" name = 'look' aliases = ['l'] def get_appearance(self,", "func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse =", "invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave an object. \"\"\" name = 'leave'", "table.commands: buf += ' %s' % cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous", "you. \"\"\" name = 'say' def func(self, invoker, parsed_cmd): # The sentence to", "the commands in the table. :param CommandTable table: The command table whose commands", "name for the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave %s\" % leave_from_name)", "commands in the table. :param CommandTable table: The command table whose commands to", "send to user. buf = '' if invoker.is_admin(): buf += '\\nGlobal Admin Commands:'", "location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) #", "handled through a deferred and a callback. \"\"\" service = invoker.mud_service deferred =", "\"Player\\n\" for account in accounts: retval += \" %s\\n\" % account nplayers =", "for cmd in table.commands: buf += ' %s' % cmd.name return buf class", "an object. \"\"\" name = 'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments:", "string that lists the commands in the table. :param CommandTable table: The command", "ask. This is handled through a deferred and a callback. \"\"\" service =", "can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving", "always getting the object's normal appearance, regardless of whether the player is an", "whether the invoker is a player or admin. 
:rtype: str :returns: The object's", "_buffer_command_table(self, table): \"\"\" Given a CommandTable instance, return a string that lists the", "table): \"\"\" Given a CommandTable instance, return a string that lists the commands", "parsed_cmd): buf = \"-\" * 78 buf += \"\\n %s version %s\\n\" %", "Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases: %s\\n'", "user_query: raise CommandError('You must specify an object to examine.') obj_match = invoker.contextual_object_search(user_query) if", "appear to be nowhere. Bummer.') user_query = 'here' else: user_query = ' '.join(parsed_cmd.arguments)", "return the normal description. :rtype: str :returns: The object's appearance, from the invoker's", "the original object's name for the user message. leave_from_name = location.get_appearance_name(invoker) invoker.emit_to(\"You leave", ":rtype: str :returns: A string list of commands in the table. \"\"\" buf", "see. speech_str = u\"%s says '%s'\" % (invoker.name, speech) # What the invoker", "raise CommandError(\"You look around, but can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if", "Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf +=", "are available to everyone. \"\"\" import json import settings from src.daemons.server.commands.command import BaseCommand", "import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\" name = 'examine'", "if obj.zone: attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description:", "' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify an object to examine.')", "with people in the same room as you. 
\"\"\" name = 'say' def", "= obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to", "= invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine", "the invoker is a player or admin. :rtype: str :returns: The object's appearance.", "object's name for the user message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" %", "object as it were examined. \"\"\" attributes_str = ' Parent: %s (%s)\\n' %", "CommandError(cant_leave_msg) # Determine where leaving the object puts us. leave_to = location.determine_leave_destination(invoker) #", "applicable), and admin status. \"\"\" name = 'commands' #noinspection PyUnusedLocal def func(self, invoker,", "\"\"\" name = 'quit' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service =", "examine display that shows an object's un-parsed name/description, and attributes. If the invoker", "where leaving the object puts us. leave_to = location.determine_leave_destination(invoker) # Use the original", "the mud server has to ask. This is handled through a deferred and", "\"\"\" import json import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions import CommandError", "obj.attributes: attributes_str += '\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker)", "attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class", "Disconnects from the game. 
\"\"\" name = 'quit' #noinspection PyUnusedLocal def func(self, invoker,", "if not parsed_cmd.arguments: raise CommandError('Go through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not", "as you. \"\"\" name = 'say' def func(self, invoker, parsed_cmd): # The sentence", "'.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str", "CommandError(\"You look around, but can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker) if not", "= invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin Commands:'", "raise CommandError(cant_enter_msg) # Determine where entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker)", "= ['l'] def get_appearance(self, obj_match, invoker): \"\"\" The 'look' command always shows an", "service.global_cmd_table ) location = invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf +=", "invoker): \"\"\" Shows the object as it were examined. \"\"\" attributes_str = '", "'examine' aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: #", "invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table )", "obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += '", "proxy. :param PlayerObject invoker: The player who ran the command. \"\"\" accounts =", "to everyone. \"\"\" import json import settings from src.daemons.server.commands.command import BaseCommand from src.daemons.server.commands.exceptions", "appearance, regardless of whether the player is an admin or not. 
\"\"\" name", "obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows the object as it were examined.", "\"\"\" A REALLY basic WHO list. \"\"\" name = 'who' #noinspection PyUnusedLocal def", "service = invoker.mud_service # Buffer to send to user. buf = '' if", "+= ' Location: %s\\n' % obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\\n'", "' Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str += ' Aliases:", "self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows the object", "proxy gets back to us on who is connected, this callback triggers. :param", "# No arguments means defaulting to 'here'. if not invoker.location: # This shouldn't", "if not obj_match: raise CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance)", "says '%s'\" % (invoker.name, speech) # What the invoker sees. self_str = u\"You", ") if location.local_command_table: buf += '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf)", "results: The details returned by the proxy. :param PlayerObject invoker: The player who", ":returns: A string list of commands in the table. \"\"\" buf = ''", "who ran the command. \"\"\" accounts = results['accounts'] retval = \"Player\\n\" for account", "parsed_cmd): service = invoker.mud_service # Buffer to send to user. buf = ''", "is connected, so the mud server has to ask. This is handled through", "% leave_from_name) invoker.move_to(leave_to) class CmdCommands(BaseCommand): \"\"\" Lists a break-down of available commands. Takes", "connected, so the mud server has to ask. This is handled through a", "to enter an object. 
\"\"\" name = 'enter' def func(self, invoker, parsed_cmd): if", "% nplayers invoker.emit_to(retval) class CmdSay(BaseCommand): \"\"\" Communicate with people in the same room", "Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' % obj.description if obj.internal_description:", "self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\" Given a CommandTable instance, return", "location: if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf += self._buffer_command_table(", "if not parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise", "examine, aside from always getting the object's normal appearance, regardless of whether the", "a normal player, this will simply return the normal description. :rtype: str :returns:", "invoker, parsed_cmd): \"\"\" The proxy has all of the details on who is", "user. buf = '' if invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf +=", "def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments means defaulting to", "if invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf", "if location.local_command_table: buf += '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def", "DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\" name = 'examine' aliases =", "as it were examined. \"\"\" attributes_str = ' Parent: %s (%s)\\n' % (obj.parent,", "func(self, invoker, parsed_cmd): location = invoker.location can_leave, cant_leave_msg = location.can_object_leave(invoker) if not can_leave:", "(name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit. 
\"\"\" name =", "object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list.", "has all of the details on who is connected, so the mud server", "a very nerdy examine display that shows an object's un-parsed name/description, and attributes.", "return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker): \"\"\" Shows the", "callback. \"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results,", "PlayerObject invoker: The player who ran the command. \"\"\" accounts = results['accounts'] retval", "invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks the proxy to disconnect any sessions", "invoker.location if location: if invoker.is_admin() and location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf", "name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to", "dott version identifier. Currently a git commit hash. \"\"\" name = 'version' #noinspection", "that shows an object's un-parsed name/description, and attributes. If the invoker is a", "u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from", "identifier. Currently a git commit hash. \"\"\" name = 'version' #noinspection PyUnusedLocal def", "enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave an object. \"\"\" name =", "\"\"\" The 'look' command always shows an object's normal appearance, despite whether the", "invoker): \"\"\" Once the proxy gets back to us on who is connected,", "object. 
\"\"\" name = 'examine' aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd):", "CmdCommands(BaseCommand): \"\"\" Lists a break-down of available commands. Takes into account your location's", "objects to see. speech_str = u\"%s says '%s'\" % (invoker.name, speech) # What", "the original object's name for the user message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter", "raise CommandError('You must specify an object to examine.') obj_match = invoker.contextual_object_search(user_query) if not", "obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look around, but can't find", "break-down of available commands. Takes into account your location's command table (if applicable),", "any sessions that are currently # controlling this object. service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, )", "user message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand):", "attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' %", ":param CommandTable table: The command table whose commands to list. :rtype: str :returns:", "return buf class CmdLook(CmdExamine): \"\"\" Synonymous with examine, aside from always getting the", "invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker): \"\"\" Checks to see whether the invoker", "this will simply return the normal description. :rtype: str :returns: The object's appearance,", "# What the invoker sees. self_str = u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str,", "happen, but... raise CommandError('You appear to be nowhere. Bummer.') user_query = 'here' else:", "This shouldn't ever happen, but... raise CommandError('You appear to be nowhere. Bummer.') user_query", "exit. 
\"\"\" name = 'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise", "\"-\" * 78 buf += \"\\n %s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION", "on who is connected, this callback triggers. :param dict results: The details returned", "= service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\" Once the proxy gets", "an object's un-parsed name/description, and attributes. If the invoker is a normal player,", "server has to ask. This is handled through a deferred and a callback.", "func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No arguments means defaulting to 'here'.", "to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object found.')", "to list. :rtype: str :returns: A string list of commands in the table.", "+= ' Internal Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str += '\\n### ATTRIBUTES", "to us on who is connected, this callback triggers. :param dict results: The", "invoker, parsed_cmd): # The sentence to speak. speech = u' '.join(parsed_cmd.arguments) # Presentational", "ran the command. \"\"\" accounts = results['accounts'] retval = \"Player\\n\" for account in", "must specify an object to examine.') obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise", "obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to) class CmdLeave(BaseCommand): \"\"\" Attempts to leave", "% speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\"", "nplayers = len(accounts) if nplayers == 1: retval += 'One player logged in.'", "normal description. :rtype: str :returns: The object's appearance, from the invoker's perspective. 
\"\"\"", "but... raise CommandError('You appear to be nowhere. Bummer.') user_query = 'here' else: user_query", "class CmdCommands(BaseCommand): \"\"\" Lists a break-down of available commands. Takes into account your", "PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The proxy has all of the details", "'\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location if location: if", "%s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n' % obj.description if obj.internal_description: attributes_str", "buf += ' %s' % cmd.name return buf class CmdLook(CmdExamine): \"\"\" Synonymous with", "\"\"\" Shows the dott version identifier. Currently a git commit hash. \"\"\" name", "if not can_enter: raise CommandError(cant_enter_msg) # Determine where entering the object puts us.", "= u' '.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring objects to see. speech_str", "invoker, parsed_cmd): invoker.emit_to(\"Quitting...\") service = invoker.mud_service # This asks the proxy to disconnect", "'\\n### ATTRIBUTES ###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" %", "to traverse an exit. \"\"\" name = 'go' def func(self, invoker, parsed_cmd): if", "invoker): \"\"\" Checks to see whether the invoker is an admin. If so,", "= ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify an object to", "details on who is connected, so the mud server has to ask. This", "command. \"\"\" accounts = results['accounts'] retval = \"Player\\n\" for account in accounts: retval", "The sentence to speak. speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement for other", "The player who ran the command. \"\"\" accounts = results['accounts'] retval = \"Player\\n\"", "# controlling this object. 
service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the", "Determine where leaving the object puts us. leave_to = location.determine_leave_destination(invoker) # Use the", "service.proxyamp.callRemote( DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier. Currently", "obj.internal_description: attributes_str += ' Internal Description: %s\\n' % obj.internal_description if obj.attributes: attributes_str +=", "DisconnectSessionsOnObjectCmd, object_id=invoker.id, ) class CmdVersion(BaseCommand): \"\"\" Shows the dott version identifier. Currently a", "json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\"", "CmdEnter(BaseCommand): \"\"\" Attempts to enter an object. \"\"\" name = 'enter' def func(self,", "the object puts us. leave_to = location.determine_leave_destination(invoker) # Use the original object's name", "buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf += '\\nLocal Commands:' buf +=", "= \"Player\\n\" for account in accounts: retval += \" %s\\n\" % account nplayers", "an object. \"\"\" name = 'examine' aliases = ['ex', 'exa'] def func(self, invoker,", "= len(accounts) if nplayers == 1: retval += 'One player logged in.' 
else:", "the details on who is connected, so the mud server has to ask.", "\"\"\" Given a CommandTable instance, return a string that lists the commands in", "* 78 buf += \"\\n %s version %s\\n\" % ( settings.GAME_NAME, settings.VERSION )", "parsed_cmd.arguments: raise CommandError('Enter what?') obj_to_enter = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_enter: raise CommandError(\"You look", "obj_to_enter: raise CommandError(\"You look around, but can't find it.\") can_enter, cant_enter_msg = obj_to_enter.can_object_enter(invoker)", "obj.description if obj.internal_description: attributes_str += ' Internal Description: %s\\n' % obj.internal_description if obj.attributes:", "object's appearance, from the invoker's perspective. \"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else:", "buf += '\\nGlobal Admin Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal", "if not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the object puts us.", "to 'here'. if not invoker.location: # This shouldn't ever happen, but... raise CommandError('You", "a CommandTable instance, return a string that lists the commands in the table.", "\"\"\" Examines an object. \"\"\" name = 'examine' aliases = ['ex', 'exa'] def", "'say' def func(self, invoker, parsed_cmd): # The sentence to speak. speech = u'", "name = 'enter' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Enter what?')", "display that shows an object's un-parsed name/description, and attributes. If the invoker is", "table whose commands to list. :rtype: str :returns: A string list of commands", "for the user message. 
enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You enter %s\" % enter_to_name) invoker.move_to(enter_to)", "= invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit':", "% (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit. \"\"\" name", "of whether the player is an admin or not. \"\"\" name = 'look'", "accounts: retval += \" %s\\n\" % account nplayers = len(accounts) if nplayers ==", "invoker is a player or admin. :rtype: str :returns: The object's appearance. \"\"\"", "Aliases: %s\\n' % ', '.join(obj.aliases) if obj.location: attributes_str += ' Location: %s\\n' %", "through which exit?') obj_to_traverse = invoker.contextual_object_search(parsed_cmd.argument_string) if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if", "\"\"\" name = 'version' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): buf = \"-\"", "= ' Parent: %s (%s)\\n' % (obj.parent, obj.base_type) if obj.aliases: attributes_str += '", "deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback, invoker) def _wholist_callback(self, results, invoker): \"\"\" Once the proxy", "u' '.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring objects to see. speech_str =", "Commands:' buf += self._buffer_command_table( service.global_admin_cmd_table ) buf += '\\nGlobal Commands:' buf += self._buffer_command_table(", "_wholist_callback(self, results, invoker): \"\"\" Once the proxy gets back to us on who", "and attributes. If the invoker is a normal player, this will simply return", "through a deferred and a callback. 
\"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd)", "in accounts: retval += \" %s\\n\" % account nplayers = len(accounts) if nplayers", "exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the game. \"\"\" name = 'quit'", "the table. \"\"\" buf = '' for cmd in table.commands: buf += '", "if not obj_to_traverse: raise CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't", "# Determine where entering the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use", "unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like an exit.\") obj_to_traverse.pass_object_through(invoker)", "Use the original object's name for the user message. enter_to_name = obj_to_enter.get_appearance_name(invoker) invoker.emit_to(\"You", "+= '\\nLocal Commands:' buf += self._buffer_command_table( location.local_command_table ) invoker.emit_to(buf) def _buffer_command_table(self, table): \"\"\"", "obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list. \"\"\" name = 'who'", "table. \"\"\" buf = '' for cmd in table.commands: buf += ' %s'", "aliases = ['l'] def get_appearance(self, obj_match, invoker): \"\"\" The 'look' command always shows", "CommandError('No matching object found.') appearance = self.get_appearance(obj_match, invoker) invoker.emit_to(appearance) def get_appearance(self, obj, invoker):", "leaving the object puts us. leave_to = location.determine_leave_destination(invoker) # Use the original object's", "is a normal player, this will simply return the normal description. :rtype: str", "object's normal appearance, despite whether the invoker is a player or admin. 
:rtype:", "in table.commands: buf += ' %s' % cmd.name return buf class CmdLook(CmdExamine): \"\"\"", "retval = \"Player\\n\" for account in accounts: retval += \" %s\\n\" % account", "Shows the object as it were examined. \"\"\" attributes_str = ' Parent: %s", "to speak. speech = u' '.join(parsed_cmd.arguments) # Presentational arrangement for other neighboring objects", "location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the object puts", "return \"%s\\n%s\" % (name, attributes_str) class CmdGo(BaseCommand): \"\"\" Attempts to traverse an exit.", "'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd): \"\"\" The proxy has all of", "invoker.mud_service # This asks the proxy to disconnect any sessions that are currently", "'%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the game.", "if not parsed_cmd.arguments: # No arguments means defaulting to 'here'. if not invoker.location:", "\"\"\" Shows the object as it were examined. \"\"\" attributes_str = ' Parent:", "and location.local_admin_command_table: buf += '\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if", "basic WHO list. \"\"\" name = 'who' #noinspection PyUnusedLocal def func(self, invoker, parsed_cmd):", "###\\n' attributes_str += json.dumps(obj.attributes, indent=3) name = obj.get_appearance_name(invoker=invoker) return \"%s\\n%s\" % (name, attributes_str)", "invoker sees. self_str = u\"You say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class", "CommandError(\"Destination unknown.\") if not obj_to_traverse.base_type == 'exit': invoker.emit_to(\"That doesn't look like an exit.\")", "details returned by the proxy. 
:param PlayerObject invoker: The player who ran the", "obj_match = invoker.contextual_object_search(user_query) if not obj_match: raise CommandError('No matching object found.') appearance =", "to user. buf = '' if invoker.is_admin(): buf += '\\nGlobal Admin Commands:' buf", "'here' else: user_query = ' '.join(parsed_cmd.arguments) if not user_query: raise CommandError('You must specify", "This is handled through a deferred and a callback. \"\"\" service = invoker.mud_service", "CommandError from src.daemons.server.protocols.proxyamp import WhoConnectedCmd, DisconnectSessionsOnObjectCmd class CmdExamine(BaseCommand): \"\"\" Examines an object. \"\"\"", "despite whether the invoker is a player or admin. :rtype: str :returns: The", "say '%s'\" % speech invoker.location.emit_to_contents(speech_str, exclude=[invoker]) invoker.emit_to(self_str) class CmdQuit(BaseCommand): \"\"\" Disconnects from the", "us on who is connected, this callback triggers. :param dict results: The details", "The object's appearance. \"\"\" return obj_match.get_appearance(invoker) class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO", "to see. speech_str = u\"%s says '%s'\" % (invoker.name, speech) # What the", "buf += '\\nLocal Admin Commands:' buf += self._buffer_command_table( location.local_admin_command_table ) if location.local_command_table: buf", "= 'go' def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: raise CommandError('Go through which", "class CmdWho(BaseCommand): \"\"\" A REALLY basic WHO list. \"\"\" name = 'who' #noinspection", "% obj.location.get_appearance_name(invoker) if obj.zone: attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str +=", "cant_leave_msg = location.can_object_leave(invoker) if not can_leave: raise CommandError(cant_leave_msg) # Determine where leaving the", "logged in.' else: retval += '%d players logged in.' % nplayers invoker.emit_to(retval) class", "\"\"\" Lists a break-down of available commands. 
Takes into account your location's command", "= '' for cmd in table.commands: buf += ' %s' % cmd.name return", "buf += '\\nGlobal Commands:' buf += self._buffer_command_table( service.global_cmd_table ) location = invoker.location if", "\"\"\" if invoker.is_admin(): return self.get_examine_appearance(obj, invoker) else: return obj.get_appearance(invoker) def get_examine_appearance(self, obj, invoker):", "whether the player is an admin or not. \"\"\" name = 'look' aliases", "a git commit hash. \"\"\" name = 'version' #noinspection PyUnusedLocal def func(self, invoker,", "a deferred and a callback. \"\"\" service = invoker.mud_service deferred = service.proxyamp.callRemote(WhoConnectedCmd) deferred.addCallback(self._wholist_callback,", "obj.zone: attributes_str += ' Zone: %s\\n' % obj.zone.get_appearance_name(invoker) attributes_str += ' Description: %s\\n'", "aliases = ['ex', 'exa'] def func(self, invoker, parsed_cmd): if not parsed_cmd.arguments: # No", "invoker: The player who ran the command. \"\"\" accounts = results['accounts'] retval =", "the object puts us. enter_to = obj_to_enter.determine_enter_destination(invoker) # Use the original object's name" ]
[ "= frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying \"' + src +", "= os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src) + \".js\")", "src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt))", "tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) +", "_ from frappe.utils import cint import shutil, os from frappe.modules import scrub, get_module_path", "tgt, '\"') module_path = get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\", scrub(src)) src_path", "= os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\")", "Technologies and contributors # For license information, please see license.txt from __future__ import", "see license.txt from __future__ import unicode_literals import frappe from frappe import _ from", "src + '\" to \"' + tgt, '\"') module_path = get_module_path(module) src_folder =", "and contributors # For license information, please see license.txt from __future__ import unicode_literals", "import frappe from frappe import _ from frappe.utils import cint import shutil, os", "\".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path and", "os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path =", "scrub(src) + \".js\") tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder,", "and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path = tgt_folder", "import shutil, os from frappe.modules import 
scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview", "= module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path", "'\"') module_path = get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\", scrub(src)) src_path =", "os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder and", "\".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path) shutil.copyfile(src_script_path, tgt_script_path)", "unicode_literals import frappe from frappe import _ from frappe.utils import cint import shutil,", "copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\",", "= src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path and os.path.join(module_path, \"report\",", "copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying", "from frappe.utils import cint import shutil, os from frappe.modules import scrub, get_module_path def", "\"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying \"'", "from frappe.modules import scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage:", "Copyright (c) 2013, GreyCube Technologies and contributors # For license information, please see", "print('Copying \"' + src + '\" to \"' + tgt, '\"') module_path =", "contributors # For license information, please see license.txt from __future__ import unicode_literals import", "# For 
license information, please see license.txt from __future__ import unicode_literals import frappe", "os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder", "import unicode_literals import frappe from frappe import _ from frappe.utils import cint import", "frappe.utils import cint import shutil, os from frappe.modules import scrub, get_module_path def copy_report(", "module_path = get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder,", "src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name", "license information, please see license.txt from __future__ import unicode_literals import frappe from frappe", "get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc", "For license information, please see license.txt from __future__ import unicode_literals import frappe from", "+ '\" to \"' + tgt, '\"') module_path = get_module_path(module) src_folder = module_path", "doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying \"' + src", "scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src)", "from __future__ import unicode_literals import frappe from frappe import _ from frappe.utils import", "module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src))", "__future__ import unicode_literals import frappe from frappe import _ from frappe.utils import cint", "frappe.modules import scrub, get_module_path def 
copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\",", "tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\")", "= module_path and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path", "please see license.txt from __future__ import unicode_literals import frappe from frappe import _", "os from frappe.modules import scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ):", "scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\"", "def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc =", "module_path and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path =", "2013, GreyCube Technologies and contributors # For license information, please see license.txt from", "): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert()", "+ src + '\" to \"' + tgt, '\"') module_path = get_module_path(module) src_folder", "shutil, os from frappe.modules import scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\",", "GreyCube Technologies and contributors # For license information, please see license.txt from __future__", "os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path,", "cint import shutil, os from frappe.modules import scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\",", "tgt 
doc.insert() frappe.db.commit() print('Copying \"' + src + '\" to \"' + tgt,", "and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder", "from frappe import _ from frappe.utils import cint import shutil, os from frappe.modules", "os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path = tgt_folder and", "+ \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path) shutil.copyfile(src_script_path,", "(c) 2013, GreyCube Technologies and contributors # For license information, please see license.txt", "scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt)", "doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying \"' + src + '\" to \"'", "get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) +", "+ tgt, '\"') module_path = get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\", scrub(src))", "\"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder and os.path.join(src_folder,", "import scrub, get_module_path def copy_report( module=\"NPro\", src=\"Interviews\", tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\",", "tgt=\"Interview Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name =", "+ \".js\") tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt)", "\"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying 
\"' +", "frappe.db.commit() print('Copying \"' + src + '\" to \"' + tgt, '\"') module_path", "'\" to \"' + tgt, '\"') module_path = get_module_path(module) src_folder = module_path and", "src_script_path = src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path and os.path.join(module_path,", "license.txt from __future__ import unicode_literals import frappe from frappe import _ from frappe.utils", "import _ from frappe.utils import cint import shutil, os from frappe.modules import scrub,", "tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path) shutil.copyfile(src_script_path, tgt_script_path) print(src_path,", "src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying \"' + src + '\" to", "\".js\") tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) +", "src_folder = module_path and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src) + \".py\")", "module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, scrub(tgt) + \".py\") tgt_script_path =", "= get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\", scrub(src)) src_path = os.path.join(src_folder, scrub(src)", "\"' + src + '\" to \"' + tgt, '\"') module_path = get_module_path(module)", "information, please see license.txt from __future__ import unicode_literals import frappe from frappe import", "\"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit()", "frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt doc.insert() frappe.db.commit() print('Copying \"' + src + '\"", "scrub(src) + \".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder 
=", "tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path) shutil.copyfile(src_script_path, tgt_script_path) print(src_path, tgt_path) print(src_script_path,", "import cint import shutil, os from frappe.modules import scrub, get_module_path def copy_report( module=\"NPro\",", "and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path) shutil.copyfile(src_script_path, tgt_script_path) print(src_path, tgt_path) print(src_script_path, tgt_script_path)", "+ \".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path", "= tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path) shutil.copyfile(src_script_path, tgt_script_path) print(src_path, tgt_path)", "= tgt doc.insert() frappe.db.commit() print('Copying \"' + src + '\" to \"' +", "to \"' + tgt, '\"') module_path = get_module_path(module) src_folder = module_path and os.path.join(module_path,", "\"' + tgt, '\"') module_path = get_module_path(module) src_folder = module_path and os.path.join(module_path, \"report\",", "# Copyright (c) 2013, GreyCube Technologies and contributors # For license information, please", "scrub(tgt) + \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder, scrub(tgt) + \".js\") shutil.copyfile(src_path, tgt_path)", "frappe from frappe import _ from frappe.utils import cint import shutil, os from", "Results\", ): \"\"\"usage: copy_report(\"NPro\", \"src\", \"tgt\")\"\"\" doc = frappe.copy_doc(frappe.get_doc(\"Report\", src)) doc.report_name = tgt", "frappe import _ from frappe.utils import cint import shutil, os from frappe.modules import", "src_path = os.path.join(src_folder, scrub(src) + \".py\") src_script_path = src_folder and os.path.join(src_folder, scrub(src) +", "doc.insert() frappe.db.commit() print('Copying \"' + src + '\" to \"' + tgt, '\"')", "\"report\", scrub(tgt)) tgt_path = os.path.join(tgt_folder, 
scrub(tgt) + \".py\") tgt_script_path = tgt_folder and os.path.join(tgt_folder,", "and os.path.join(src_folder, scrub(src) + \".js\") tgt_folder = module_path and os.path.join(module_path, \"report\", scrub(tgt)) tgt_path" ]
[ "from mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException(\"'makedirs' block", "from mazikeen.MakedirsBlock import MakedirsBlock from mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data,", "def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException(\"'makedirs' block not recognized\") return MakedirsBlock(data)", "mazikeen.MakedirsBlock import MakedirsBlock from mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data, str):", "MakedirsBlock from mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException(\"'makedirs'", "mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException(\"'makedirs' block not", "import GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException(\"'makedirs' block not recognized\")", "GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise GeneratorException(\"'makedirs' block not recognized\") return", "import MakedirsBlock from mazikeen.GeneratorException import GeneratorException def generateMakedirs(data): if not isinstance(data, str): raise" ]
[ "ymin, width, height) face_info = {\"top\": ymin, \"left\": xmin, \"width\": width, \"height\": height}", "with MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of", "\"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference import run from", "file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference import run from lib.utils.oom", "str(idx) + '.png', annotated_image) return face_info, center_point if __name__ == \"__main__\": file =", "results.detections: print(\"Nose tip:\") xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin", "* h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin + height),", "h) print(\"center point is \", int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection)", "detections of each face. 
annotated_image = image.copy() for detection in results.detections: print(\"Nose tip:\")", "point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w), int(point.y *", "detection in results.detections: print(\"Nose tip:\") xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin *", "<filename>face_detect.py import cv2 import mediapipe as mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils", "cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return face_info, center_point if __name__", "\"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point(", "int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), )", "255, 0), thickness=5, ) print(xmin, ymin, width, height) face_info = {\"top\": ymin, \"left\":", "For static images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with", ") center_point = int(point.x * w), int(point.y * h) print(\"center point is \",", "run from lib.utils.oom import free_up_memory image = cv2.imread(file) face_info, points = get_face_key_point(image) img_crop", "from face_crop import get_crop_img from inference import run from lib.utils.oom import free_up_memory image", "= {\"top\": ymin, \"left\": xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP", "circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w, _", "= 
mp.solutions.drawing_utils # For static images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1)", "* h) print(\"center point is \", int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image,", "mp.solutions.drawing_utils # For static images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def", "= mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w), int(point.y * h)", "point is \", int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL)", "w, _ = image.shape print(image.shape) # Convert the BGR image to RGB and", "= face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of each face. annotated_image = image.copy()", "of each face. annotated_image = image.copy() for detection in results.detections: print(\"Nose tip:\") xmin,", "int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin +", "face_info = {\"top\": ymin, \"left\": xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection,", "face. 
annotated_image = image.copy() for detection in results.detections: print(\"Nose tip:\") xmin, ymin, width,", "int(point.y * h) print(\"center point is \", int(point.x * w), int(point.y * h))", "width, height) face_info = {\"top\": ymin, \"left\": xmin, \"width\": width, \"height\": height} print(", "width, ymin + height), color=(255, 255, 0), thickness=5, ) print(xmin, ymin, width, height)", "mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w, _ = image.shape print(image.shape) #", "is \", int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\",", "center_point = int(point.x * w), int(point.y * h) print(\"center point is \", int(point.x", "h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin + height), color=(255,", "\"left\": xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point", "w), int(point.y * h) print(\"center point is \", int(point.x * w), int(point.y *", "cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return face_info,", "def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w, _ =", "* w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle(", "print(image.shape) # Convert the BGR image to RGB and process it with MediaPipe", ") as face_detection: h, w, _ = image.shape print(image.shape) # Convert the BGR", "ymin, \"left\": xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) 
)", "print(\"center point is \", int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\",", "# cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return face_info, center_point if __name__ ==", "process it with MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face", "w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) #", "mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For static images: IMAGE_FILES = []", "w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image,", "MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of each", "= image.shape print(image.shape) # Convert the BGR image to RGB and process it", "ymin + height), color=(255, 255, 0), thickness=5, ) print(xmin, ymin, width, height) face_info", "* h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' +", "{\"top\": ymin, \"left\": xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP )", "* w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width,", "h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + 
str(idx)", "center_point if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from", "image = cv2.imread(file) face_info, points = get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img", "height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height", "( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h),", "import run from lib.utils.oom import free_up_memory image = cv2.imread(file) face_info, points = get_face_key_point(image)", "cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return", "annotated_image) return face_info, center_point if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop", "drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection:", "return face_info, center_point if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import", "cv2.imread(file) face_info, points = get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img = run(img_crop)", "print(\"Nose tip:\") xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin *", "= mp.solutions.face_detection 
mp_drawing = mp.solutions.drawing_utils # For static images: IMAGE_FILES = [] drawing_spec", ") cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin + height), color=(255, 255,", "0), thickness=5, ) print(xmin, ymin, width, height) face_info = {\"top\": ymin, \"left\": xmin,", "get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img) cv2.namedWindow(\"img_crop\", cv2.WINDOW_NORMAL)", "= \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference import run from lib.utils.oom import", "inference import run from lib.utils.oom import free_up_memory image = cv2.imread(file) face_info, points =", "= [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 )", "model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w, _ = image.shape print(image.shape) # Convert", "mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point =", "= image.copy() for detection in results.detections: print(\"Nose tip:\") xmin, ymin, width, height =", "'.png', annotated_image) return face_info, center_point if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from", "import cv2 import mediapipe as mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils #", "detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w), int(point.y * h) print(\"center point", "__name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference import", "w), 
int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin", "int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image'", "cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image)", "cv2.COLOR_BGR2RGB)) # Draw face detections of each face. annotated_image = image.copy() for detection", "= cv2.imread(file) face_info, points = get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img =", "and process it with MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw", "RGB and process it with MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) #", "width, height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w),", "import free_up_memory image = cv2.imread(file) face_info, points = get_face_key_point(image) img_crop = get_crop_img(image, face_info,", ") ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w),", "xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width", "Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of each face. 
annotated_image", "height) face_info = {\"top\": ymin, \"left\": xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point(", "points = get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img)", "image.shape print(image.shape) # Convert the BGR image to RGB and process it with", "+ str(idx) + '.png', annotated_image) return face_info, center_point if __name__ == \"__main__\": file", "\"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference import run from lib.utils.oom import free_up_memory", "width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection,", "int(point.x * w), int(point.y * h) print(\"center point is \", int(point.x * w),", "== \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference import run", "* w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0)", "tip:\") xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h),", "pt1=(xmin, ymin), pt2=(xmin + width, ymin + height), color=(255, 255, 0), thickness=5, )", "height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP )", "mp_drawing = mp.solutions.drawing_utils # For static images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1,", "BGR image to RGB and process it with MediaPipe Face Detection. 
results =", "pt2=(xmin + width, ymin + height), color=(255, 255, 0), thickness=5, ) print(xmin, ymin,", "ymin), pt2=(xmin + width, ymin + height), color=(255, 255, 0), thickness=5, ) print(xmin,", "* h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin),", "h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin", "print(xmin, ymin, width, height) face_info = {\"top\": ymin, \"left\": xmin, \"width\": width, \"height\":", "int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin +", "\"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP", "import get_crop_img from inference import run from lib.utils.oom import free_up_memory image = cv2.imread(file)", "image.copy() for detection in results.detections: print(\"Nose tip:\") xmin, ymin, width, height = (", "get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img) cv2.namedWindow(\"img_crop\", cv2.WINDOW_NORMAL) cv2.imshow(\"img_crop\", res_img) cv2.waitKey(0)", "xmin, \"width\": width, \"height\": height} print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point =", "get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w, _ = image.shape", "# Convert the BGR image to RGB and process it with MediaPipe Face", "face detections of each face. 
annotated_image = image.copy() for detection in results.detections: print(\"Nose", "mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) +", "thickness=5, ) print(xmin, ymin, width, height) face_info = {\"top\": ymin, \"left\": xmin, \"width\":", "Convert the BGR image to RGB and process it with MediaPipe Face Detection.", "from lib.utils.oom import free_up_memory image = cv2.imread(file) face_info, points = get_face_key_point(image) img_crop =", "results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of each face. annotated_image =", "detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x", "annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return face_info, center_point", "_ = image.shape print(image.shape) # Convert the BGR image to RGB and process", "height), color=(255, 255, 0), thickness=5, ) print(xmin, ymin, width, height) face_info = {\"top\":", "cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return face_info, center_point if __name__ == \"__main__\":", "= int(point.x * w), int(point.y * h) print(\"center point is \", int(point.x *", "face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of each face. annotated_image = image.copy() for", "annotated_image = image.copy() for detection in results.detections: print(\"Nose tip:\") xmin, ymin, width, height", "# Draw face detections of each face. 
annotated_image = image.copy() for detection in", "\", int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image)", "free_up_memory image = cv2.imread(file) face_info, points = get_face_key_point(image) img_crop = get_crop_img(image, face_info, points)", "img_crop = get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img) cv2.namedWindow(\"img_crop\", cv2.WINDOW_NORMAL) cv2.imshow(\"img_crop\",", "as mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For static images: IMAGE_FILES", "it with MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections", "* w), int(point.y * h) print(\"center point is \", int(point.x * w), int(point.y", "from inference import run from lib.utils.oom import free_up_memory image = cv2.imread(file) face_info, points", "face_info, points = get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\",", "cv2.rectangle( annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin + height), color=(255, 255, 0),", "+ height), color=(255, 255, 0), thickness=5, ) print(xmin, ymin, width, height) face_info =", "mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w), int(point.y * h) print(\"center point is", "min_detection_confidence=0.5 ) as face_detection: h, w, _ = image.shape print(image.shape) # Convert the", ") point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w), int(point.y", "int(point.x * w), int(point.y * h)) mp_drawing.draw_detection(annotated_image, detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) #", "[] drawing_spec = mp_drawing.DrawingSpec(thickness=1, 
circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as", "detection) cv2.namedWindow(\"facemesh\", cv2.WINDOW_NORMAL) cv2.imshow(\"facemesh\", annotated_image) # cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png',", "# cv2.waitKey(0) # cv2.imwrite('/tmp/annotated_image' + str(idx) + '.png', annotated_image) return face_info, center_point if", "in results.detections: print(\"Nose tip:\") xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin * w),", "= get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img) cv2.namedWindow(\"img_crop\", cv2.WINDOW_NORMAL) cv2.imshow(\"img_crop\", res_img)", "face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img) cv2.namedWindow(\"img_crop\", cv2.WINDOW_NORMAL) cv2.imshow(\"img_crop\", res_img) cv2.waitKey(0) free_up_memory()", "= ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height *", "= get_face_key_point(image) img_crop = get_crop_img(image, face_info, points) res_img = run(img_crop) cv2.imwrite(\"temp3.jpeg\", res_img) cv2.namedWindow(\"img_crop\",", "import mediapipe as mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For static", "# For static images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image):", "mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x * w), int(point.y * h) print(\"center", "image to RGB and process it with MediaPipe Face Detection. 
results = face_detection.process(cv2.cvtColor(image,", "+ '.png', annotated_image) return face_info, center_point if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\"", "if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img from inference", "+ width, ymin + height), color=(255, 255, 0), thickness=5, ) print(xmin, ymin, width,", "Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) # Draw face detections of each face.", "mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point = int(point.x *", "the BGR image to RGB and process it with MediaPipe Face Detection. results", "lib.utils.oom import free_up_memory image = cv2.imread(file) face_info, points = get_face_key_point(image) img_crop = get_crop_img(image,", "mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For static images: IMAGE_FILES = [] drawing_spec =", "face_info, center_point if __name__ == \"__main__\": file = \"/home/whm/workspace/segmentataion/u-2-net-portrait/dataset/demo/7.jpg\" from face_crop import get_crop_img", "face_crop import get_crop_img from inference import run from lib.utils.oom import free_up_memory image =", "= mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h,", "get_crop_img from inference import run from lib.utils.oom import free_up_memory image = cv2.imread(file) face_info,", "mediapipe as mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For static images:", "mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For static images: IMAGE_FILES =", "Draw face detections of 
each face. annotated_image = image.copy() for detection in results.detections:", "IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5", "int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width * w), int(detection.location_data.relative_bounding_box.height * h), ) cv2.rectangle( annotated_image, pt1=(xmin,", "h, w, _ = image.shape print(image.shape) # Convert the BGR image to RGB", "print( mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) ) point = mp_face_detection.get_key_point( detection, mp_face_detection.FaceKeyPoint.NOSE_TIP ) center_point", "with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w, _ = image.shape print(image.shape)", "mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1, min_detection_confidence=0.5 ) as face_detection: h, w,", "as face_detection: h, w, _ = image.shape print(image.shape) # Convert the BGR image", "annotated_image, pt1=(xmin, ymin), pt2=(xmin + width, ymin + height), color=(255, 255, 0), thickness=5,", "for detection in results.detections: print(\"Nose tip:\") xmin, ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin", "ymin, width, height = ( int(detection.location_data.relative_bounding_box.xmin * w), int(detection.location_data.relative_bounding_box.ymin * h), int(detection.location_data.relative_bounding_box.width *", ") print(xmin, ymin, width, height) face_info = {\"top\": ymin, \"left\": xmin, \"width\": width,", "each face. 
annotated_image = image.copy() for detection in results.detections: print(\"Nose tip:\") xmin, ymin,", "static images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection(", "cv2 import mediapipe as mp mp_face_detection = mp.solutions.face_detection mp_drawing = mp.solutions.drawing_utils # For", "face_detection: h, w, _ = image.shape print(image.shape) # Convert the BGR image to", "to RGB and process it with MediaPipe Face Detection. results = face_detection.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))", "color=(255, 255, 0), thickness=5, ) print(xmin, ymin, width, height) face_info = {\"top\": ymin,", "images: IMAGE_FILES = [] drawing_spec = mp_drawing.DrawingSpec(thickness=1, circle_radius=1) def get_face_key_point(image): with mp_face_detection.FaceDetection( model_selection=1," ]
[ "original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1 time up and check", "Transpose the transposition transposed_copy = transpose_copy(transposed_copy) # Remove spaces from transposition without_spaces_transposed_copy =", "copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note, notes_desc)", "continue # Transpose copy 12 times and check substring of original for i", "'B', 'B#'] # Split notes of copy transpose_copy_notes = '' first_iterations = True", "# Read the original original = input() # Read the copy copy =", "if copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1 time", "= len(copy) original_len = len(original) if copy_len > original_len: return original_len - 1", "break # Read the original original = input() # Read the copy copy", "'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#'] # Split", "and num_copy == 0: break # Read the original original = input() #", "Get last char of original after length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy,", "= without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break", "-1 return notes[current_index + 1] def transpose_copy(copy_notes): # Declare the notes notes_asc =", "' ') + transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes # Infinity loop", "12 times down for i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy", "without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12 times and check substring", "'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#']", "not # or b last_char_of_substr = original[lp(copy, 
original)] if copy in original and", "return original_len - 1 return copy_len - 1 if original_len == copy_len else", "= input() # Check if copy is substring of original and last char", "copy 1 time up and check substring of original is_copy = False transposed_copy", "return char != '#' and char != 'b' def transpose_note(note, notes): if note", "def lp(copy, original): copy_len = len(copy) original_len = len(original) if copy_len > original_len:", "= len(original) if copy_len > original_len: return original_len - 1 return copy_len -", "from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last char of original after", "= int(line[0]) num_copy = int(line[1]) # Check if is end of program if", "')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue", "is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if", "# or b if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break", "if num_copy == num_original and num_copy == 0: break # Read the original", "'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#'] # Split notes", "without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # #", "the notes notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F',", "'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#'] # Split notes of", "Check 12 times down for i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy))", "substr and not # or b if without_spaces_transposed_copy in 
without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy", "Infinity loop while True: # Read line line = input().split(' ') # Get", "not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]", "transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original", "if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # # Puts", "original and last char is not # or b last_char_of_substr = original[lp(copy, original)]", "# Check if copy is substring of original and last char is not", "without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12", "original = input() # Read the copy copy = input() # Check if", "if is end of program if num_copy == num_original and num_copy == 0:", "') # Get the length of musics num_original = int(line[0]) num_copy = int(line[1])", "a copy check 1 time down if not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy", "print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy,", "substring of original is_copy = False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))", "# Declare the notes notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F',", "# Get the length of musics 
num_original = int(line[0]) num_copy = int(line[1]) #", "num_copy = int(line[1]) # Check if is end of program if num_copy ==", "'): transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note, notes_desc) first_iterations", "'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db']", "transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy = transpose_copy(transposed_copy) # Remove spaces from transposition", "without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # If not a copy check", "copy_len def not_flat_or_sharp(char): return char != '#' and char != 'b' def transpose_note(note,", "note == 'E#': return 'F#' if note == 'B#': return 'C#' if note", "- 1: current_index = -1 return notes[current_index + 1] def transpose_copy(copy_notes): # Declare", "char is not # or b last_char_of_substr = original[lp(copy, original)] if copy in", "without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times down for", "if not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy,", "first_iterations = True for note in copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations", "char != '#' and char != 'b' def transpose_note(note, notes): if note ==", "not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12 times and check substring of original", "b if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # If", "of musics num_original = int(line[0]) num_copy = int(line[1]) # Check if is end", "1: current_index = -1 return notes[current_index + 1] def transpose_copy(copy_notes): # Declare the", "if without_spaces_transposed_copy in without_spaces_original and 
not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12 times", "copy_len = len(copy) original_len = len(original) if copy_len > original_len: return original_len -", "down if not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr =", "of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr and", "without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if", "('' if first_iterations else ' ') + transpose_note(note, notes_asc) first_iterations = False return", "notes_asc) first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes notes_desc", "if note == 'B': return 'C' current_index = notes.index(note) if current_index == len(notes)", "the length of musics num_original = int(line[0]) num_copy = int(line[1]) # Check if", "== 'E#': return 'F#' if note == 'B#': return 'C#' if note ==", "True break # If not a copy check 1 time down if not", "without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last char of original after length of", "of program if num_copy == num_original and num_copy == 0: break # Read", "line = input().split(' ') # Get the length of musics num_original = int(line[0])", "is substring of original and last char is not # or b last_char_of_substr", "original_len: return original_len - 1 return copy_len - 1 if original_len == copy_len", "if is substr and not # or b if without_spaces_transposed_copy in without_spaces_original and", "and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12 times and check substring of", "transposition transposed_copy = transpose_copy(transposed_copy) # 
Remove spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))", "copy check 1 time down if not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy =", "transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last char of original after length", "original original = input() # Read the copy copy = input() # Check", "lp(copy, original): copy_len = len(copy) original_len = len(original) if copy_len > original_len: return", "loop while True: # Read line line = input().split(' ') # Get the", "'E', 'Eb', 'D', 'Db'] # Split notes of copy transpose_copy_notes = '' first_iterations", "')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy =", "of original after length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check", "transpose_copy(transposed_copy) # Remove spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last", "transpose_copy_notes # Infinity loop while True: # Read line line = input().split(' ')", "transpose_copy_down(copy_notes): # Declare the notes notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab',", "in copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note,", "without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # # Puts the output print('S'", "num_original = int(line[0]) num_copy = int(line[1]) # Check if is end of program", "check 1 time down if not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split('", "without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if 
without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break #", "= True for note in copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else", "copy is substring of original and last char is not # or b", "in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy =", "'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#'] # Split notes of copy transpose_copy_notes", "not_flat_or_sharp(last_char_of_substr): is_copy = True break # # Puts the output print('S' if is_copy", "copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note, notes_asc)", "False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr", "times down for i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy =", "# Transpose copy 1 time up and check substring of original is_copy =", "transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy", "of copy transpose_copy_notes = '' first_iterations = True for note in copy_notes.split(' '):", "if first_iterations else ' ') + transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes", "notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A',", "note == 'B#': return 'C#' if note == 'B': return 'C' current_index =", "')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original", 
"print('S') continue # Transpose copy 12 times and check substring of original for", "notes notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#',", "Split notes of copy transpose_copy_notes = '' first_iterations = True for note in", "''.join(transposed_copy.split(' ')) # Get last char of original after length of transposed copy", "'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db'] #", "down for i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy)", "for i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy", "while True: # Read line line = input().split(' ') # Get the length", "!= '#' and char != 'b' def transpose_note(note, notes): if note == 'E#':", "b last_char_of_substr = original[lp(copy, original)] if copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue", "print('S') continue # Transpose copy 1 time up and check substring of original", "or b if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break #", "transpose_copy(copy_notes): # Declare the notes notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#',", "False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes notes_desc = ['C', 'Cb',", "- 1 if original_len == copy_len else copy_len def not_flat_or_sharp(char): return char !=", "> original_len: return original_len - 1 return copy_len - 1 if original_len ==", "copy_len > original_len: return original_len - 1 return copy_len - 1 if original_len", "substring of original for i in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) #", "in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times down 
for i", "Read line line = input().split(' ') # Get the length of musics num_original", "original for i in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the", "= True break # # Puts the output print('S' if is_copy else 'N')", "notes_desc) first_iterations = False return transpose_copy_notes # Infinity loop while True: # Read", "of original and last char is not # or b last_char_of_substr = original[lp(copy,", "= '' first_iterations = True for note in copy_notes.split(' '): transpose_copy_notes += (''", "continue # Transpose copy 1 time up and check substring of original is_copy", "return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes notes_desc = ['C', 'Cb', 'B',", "if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # If not", "copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr and not #", "the notes notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G',", "return transpose_copy_notes # Infinity loop while True: # Read line line = input().split('", "# Get last char of original after length of transposed copy last_char_of_substr =", "transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy", "and not_flat_or_sharp(last_char_of_substr): is_copy = True break # # Puts the output print('S' if", "# Remove spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last char", "char of original after length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] #", "if note == 'B#': return 'C#' if note == 'B': 
return 'C' current_index", "line line = input().split(' ') # Get the length of musics num_original =", "check substring of original for i in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy))", "else ' ') + transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes # Infinity", "transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original", "'C#' if note == 'B': return 'C' current_index = notes.index(note) if current_index ==", "continue # Check 12 times down for i in range(0, 12): # print(transposed_copy,", "'|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]", "musics num_original = int(line[0]) num_copy = int(line[1]) # Check if is end of", "up and check substring of original is_copy = False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy", "not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times down for i in range(0, 12):", "last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr and not # or", "notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E',", "12 times and check substring of original for i in range(1, 12): #", "Transpose copy 1 time up and check substring of original is_copy = False", "('' if first_iterations else ' ') + transpose_note(note, notes_desc) first_iterations = False return", "'): transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note, notes_asc) first_iterations", "is_copy = 
False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split('", "original_len = len(original) if copy_len > original_len: return original_len - 1 return copy_len", "= input() # Read the copy copy = input() # Check if copy", "Get the length of musics num_original = int(line[0]) num_copy = int(line[1]) # Check", "original is_copy = False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original =", "return 'F#' if note == 'B#': return 'C#' if note == 'B': return", "last_char_of_substr = original[lp(copy, original)] if copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue #", "= ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):", "Declare the notes notes_asc = ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#',", "= ''.join(transposed_copy.split(' ')) # Get last char of original after length of transposed", "'F', 'Fb', 'E', 'Eb', 'D', 'Db'] # Split notes of copy transpose_copy_notes =", "'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db'] # Split notes of", "= False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes notes_desc = ['C',", "check substring of original is_copy = False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split('", "not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1 time up and check substring of", "Check if is end of program if num_copy == num_original and num_copy ==", "= True break # If not a copy check 1 time down if", "'C' current_index = notes.index(note) if current_index == len(notes) - 1: current_index = -1", "without_spaces_original = 
''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and", "num_copy == 0: break # Read the original original = input() # Read", "return notes[current_index + 1] def transpose_copy(copy_notes): # Declare the notes notes_asc = ['C',", "== 0: break # Read the original original = input() # Read the", "first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes notes_desc =", "time down if not is_copy: transposed_copy = transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr", "')) # Get last char of original after length of transposed copy last_char_of_substr", "True: # Read line line = input().split(' ') # Get the length of", "'Eb', 'D', 'Db'] # Split notes of copy transpose_copy_notes = '' first_iterations =", "= without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr and not # or b", "without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # # Puts the", "transposed_copy = transpose_copy(transposed_copy) # Remove spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) #", "last char is not # or b last_char_of_substr = original[lp(copy, original)] if copy", "copy = input() # Check if copy is substring of original and last", "'b' def transpose_note(note, notes): if note == 'E#': return 'F#' if note ==", "= without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check", "1 return copy_len - 1 if original_len == copy_len else copy_len def not_flat_or_sharp(char):", "# print(transposed_copy, '|||###########||||', 
transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy = transpose_copy(transposed_copy) # Remove", "'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db'] # Split notes of copy", "''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S')", "times and check substring of original for i in range(1, 12): # print(transposed_copy,", "in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12 times and check", "Check if is substr and not # or b if without_spaces_transposed_copy in without_spaces_original", "input() # Read the copy copy = input() # Check if copy is", "notes[current_index + 1] def transpose_copy(copy_notes): # Declare the notes notes_asc = ['C', 'C#',", "else copy_len def not_flat_or_sharp(char): return char != '#' and char != 'b' def", "if copy is substring of original and last char is not # or", "== num_original and num_copy == 0: break # Read the original original =", "= transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in", "copy_len else copy_len def not_flat_or_sharp(char): return char != '#' and char != 'b'", "char != 'b' def transpose_note(note, notes): if note == 'E#': return 'F#' if", "== len(notes) - 1: current_index = -1 return notes[current_index + 1] def transpose_copy(copy_notes):", "+ transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes # Infinity loop while True:", "= ['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#',", "# Check if is end of program if num_copy == num_original and num_copy", "= False return transpose_copy_notes # Infinity loop 
while True: # Read line line", "# or b last_char_of_substr = original[lp(copy, original)] if copy in original and not_flat_or_sharp(last_char_of_substr):", "len(original) if copy_len > original_len: return original_len - 1 return copy_len - 1", "break # If not a copy check 1 time down if not is_copy:", "''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S')", "transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note, notes_desc) first_iterations =", "12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy = transpose_copy(transposed_copy) #", "print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy = transpose_copy(transposed_copy) # Remove spaces", "# Infinity loop while True: # Read line line = input().split(' ') #", "if first_iterations else ' ') + transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes", "# If not a copy check 1 time down if not is_copy: transposed_copy", "!= 'b' def transpose_note(note, notes): if note == 'E#': return 'F#' if note", "num_copy == num_original and num_copy == 0: break # Read the original original", "original after length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if", "# Check if is substr and not # or b if without_spaces_transposed_copy in", "if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times down", "') + transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare", "'G', 'G#', 'A', 
'A#', 'B', 'B#'] # Split notes of copy transpose_copy_notes =", "+ transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the", "if copy_len > original_len: return original_len - 1 return copy_len - 1 if", "'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#'] #", "'F#' if note == 'B#': return 'C#' if note == 'B': return 'C'", "or b last_char_of_substr = original[lp(copy, original)] if copy in original and not_flat_or_sharp(last_char_of_substr): print('S')", "transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if", "Declare the notes notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb',", "'Db'] # Split notes of copy transpose_copy_notes = '' first_iterations = True for", "is_copy = True break # # Puts the output print('S' if is_copy else", "the copy copy = input() # Check if copy is substring of original", "input().split(' ') # Get the length of musics num_original = int(line[0]) num_copy =", "notes notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb',", "length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr", "not # or b if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True", "input() # Check if copy is substring of original and last char is", "without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # If not a", "original): copy_len = len(copy) original_len = len(original) if copy_len > original_len: return original_len", "= transpose_copy(transposed_copy) # Remove spaces from transposition without_spaces_transposed_copy 
= ''.join(transposed_copy.split(' ')) # Get", "False return transpose_copy_notes # Infinity loop while True: # Read line line =", "without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times down for i in", "transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr and not", "['C', 'C#', 'D', 'D#', 'E', 'E#', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B',", "last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True", "= original[lp(copy, original)] if copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose", "# Declare the notes notes_desc = ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G',", "of original is_copy = False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original", "# Check 12 times down for i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||',", "= transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy,", "else ' ') + transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes):", "length of musics num_original = int(line[0]) num_copy = int(line[1]) # Check if is", "len(copy) original_len = len(original) if copy_len > original_len: return original_len - 1 return", "') + transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes # Infinity loop while", "transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes", "== 'B#': 
return 'C#' if note == 'B': return 'C' current_index = notes.index(note)", "' ') + transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes def transpose_copy_down(copy_notes): #", "first_iterations else ' ') + transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes #", "is not # or b last_char_of_substr = original[lp(copy, original)] if copy in original", "and not_flat_or_sharp(last_char_of_substr): is_copy = True break # If not a copy check 1", "''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy", "''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in", "substring of original and last char is not # or b last_char_of_substr =", "program if num_copy == num_original and num_copy == 0: break # Read the", "current_index = -1 return notes[current_index + 1] def transpose_copy(copy_notes): # Declare the notes", "for i in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the transposition", "12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr", "'A#', 'B', 'B#'] # Split notes of copy transpose_copy_notes = '' first_iterations =", "= -1 return notes[current_index + 1] def transpose_copy(copy_notes): # Declare the notes notes_asc", "1 if original_len == copy_len else copy_len def not_flat_or_sharp(char): return char != '#'", "def transpose_note(note, notes): if note == 'E#': return 'F#' if note == 'B#':", 
"copy_len - 1 if original_len == copy_len else copy_len def not_flat_or_sharp(char): return char", "return 'C' current_index = notes.index(note) if current_index == len(notes) - 1: current_index =", "'E#': return 'F#' if note == 'B#': return 'C#' if note == 'B':", "copy transpose_copy_notes = '' first_iterations = True for note in copy_notes.split(' '): transpose_copy_notes", "transpose_note(note, notes_desc) first_iterations = False return transpose_copy_notes # Infinity loop while True: #", "Read the original original = input() # Read the copy copy = input()", "True for note in copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else '", "notes of copy transpose_copy_notes = '' first_iterations = True for note in copy_notes.split('", "= int(line[1]) # Check if is end of program if num_copy == num_original", "i in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy =", "in original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1 time up and", "spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last char of original", "def not_flat_or_sharp(char): return char != '#' and char != 'b' def transpose_note(note, notes):", "int(line[0]) num_copy = int(line[1]) # Check if is end of program if num_copy", "= ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy", "== 'B': return 'C' current_index = notes.index(note) if current_index == len(notes) - 1:", "transpose_note(note, notes): if note == 'E#': return 'F#' if note == 'B#': return", "last char of original after length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, 
without_spaces_original)]", "transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)]", "if original_len == copy_len else copy_len def not_flat_or_sharp(char): return char != '#' and", "current_index = notes.index(note) if current_index == len(notes) - 1: current_index = -1 return", "+ 1] def transpose_copy(copy_notes): # Declare the notes notes_asc = ['C', 'C#', 'D',", "Check if copy is substring of original and last char is not #", "'F', 'F#', 'G', 'G#', 'A', 'A#', 'B', 'B#'] # Split notes of copy", "transpose_copy_notes = '' first_iterations = True for note in copy_notes.split(' '): transpose_copy_notes +=", "'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db'] # Split notes of copy transpose_copy_notes", "and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times down for i in range(0,", "copy 12 times and check substring of original for i in range(1, 12):", "'B#': return 'C#' if note == 'B': return 'C' current_index = notes.index(note) if", "= ''.join(original.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):", "without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 12 times and", "without_spaces_original)] # Check if is substr and not # or b if without_spaces_transposed_copy", "# Read the copy copy = input() # Check if copy is substring", "# Split notes of copy transpose_copy_notes = '' first_iterations = True for note", "- 1 return copy_len - 1 if original_len == copy_len else copy_len def", "note in copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else ' ') +", "if current_index == 
len(notes) - 1: current_index = -1 return notes[current_index + 1]", "in range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split('", "# Transpose the transposition transposed_copy = transpose_copy(transposed_copy) # Remove spaces from transposition without_spaces_transposed_copy", "# Transpose copy 12 times and check substring of original for i in", "return copy_len - 1 if original_len == copy_len else copy_len def not_flat_or_sharp(char): return", "def transpose_copy(copy_notes): # Declare the notes notes_asc = ['C', 'C#', 'D', 'D#', 'E',", "transpose_copy_notes def transpose_copy_down(copy_notes): # Declare the notes notes_desc = ['C', 'Cb', 'B', 'Bb',", "not_flat_or_sharp(last_char_of_substr): is_copy = True break # If not a copy check 1 time", "and check substring of original for i in range(1, 12): # print(transposed_copy, '|||###########||||',", "original)] if copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1", "note == 'B': return 'C' current_index = notes.index(note) if current_index == len(notes) -", "+= ('' if first_iterations else ' ') + transpose_note(note, notes_desc) first_iterations = False", "of original for i in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose", "1] def transpose_copy(copy_notes): # Declare the notes notes_asc = ['C', 'C#', 'D', 'D#',", "'B#'] # Split notes of copy transpose_copy_notes = '' first_iterations = True for", "= transpose_copy_down(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in", "and last char is not # or b last_char_of_substr = original[lp(copy, original)] if", "num_original and num_copy == 0: 
break # Read the original original = input()", "'G#', 'A', 'A#', 'B', 'B#'] # Split notes of copy transpose_copy_notes = ''", "and not # or b if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy =", "return 'C#' if note == 'B': return 'C' current_index = notes.index(note) if current_index", "Remove spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) # Get last char of", "time up and check substring of original is_copy = False transposed_copy = transpose_copy(copy)", "'Fb', 'E', 'Eb', 'D', 'Db'] # Split notes of copy transpose_copy_notes = ''", "is substr and not # or b if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr):", "without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is substr and not # or b if", "== copy_len else copy_len def not_flat_or_sharp(char): return char != '#' and char !=", "without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12 times", "and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1 time up and check substring", "# print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr =", "and char != 'b' def transpose_note(note, notes): if note == 'E#': return 'F#'", "= input().split(' ') # Get the length of musics num_original = int(line[0]) num_copy", "current_index == len(notes) - 1: current_index = -1 return notes[current_index + 1] def", "i in range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy", "1 time down if not is_copy: transposed_copy = transpose_copy_down(copy) 
without_spaces_transposed_copy = ''.join(transposed_copy.split(' '))", "'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db'] # Split notes", "def transpose_copy_down(copy_notes): # Declare the notes notes_desc = ['C', 'Cb', 'B', 'Bb', 'A',", "'B': return 'C' current_index = notes.index(note) if current_index == len(notes) - 1: current_index", "'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D', 'Db'] # Split", "first_iterations = False return transpose_copy_notes # Infinity loop while True: # Read line", "len(notes) - 1: current_index = -1 return notes[current_index + 1] def transpose_copy(copy_notes): #", "'|||###########||||', transpose_copy(transposed_copy)) # Transpose the transposition transposed_copy = transpose_copy(transposed_copy) # Remove spaces from", "is end of program if num_copy == num_original and num_copy == 0: break", "transpose_copy_notes += ('' if first_iterations else ' ') + transpose_note(note, notes_asc) first_iterations =", "print('S') continue # Check 12 times down for i in range(0, 12): #", "in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # If not a copy", "copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy 1 time up", "original_len - 1 return copy_len - 1 if original_len == copy_len else copy_len", "'#' and char != 'b' def transpose_note(note, notes): if note == 'E#': return", "original_len == copy_len else copy_len def not_flat_or_sharp(char): return char != '#' and char", "after length of transposed copy last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] # Check if is", "original[lp(copy, original)] if copy in original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy", "range(0, 12): # print(transposed_copy, '|||bbbbbbbbb||||', transpose_copy(transposed_copy)) transposed_copy = transpose_copy_down(transposed_copy) without_spaces_transposed_copy = 
''.join(transposed_copy.split(' '))", "= without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose", "last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue #", "# Read line line = input().split(' ') # Get the length of musics", "0: break # Read the original original = input() # Read the copy", "int(line[1]) # Check if is end of program if num_copy == num_original and", "end of program if num_copy == num_original and num_copy == 0: break #", "= False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' '))", "in without_spaces_original and not_flat_or_sharp(last_char_of_substr): is_copy = True break # # Puts the output", "without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Transpose copy", "first_iterations else ' ') + transpose_note(note, notes_asc) first_iterations = False return transpose_copy_notes def", "If not a copy check 1 time down if not is_copy: transposed_copy =", "without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and not_flat_or_sharp(last_char_of_substr): print('S') continue # Check 12", "is_copy = True break # If not a copy check 1 time down", "+= ('' if first_iterations else ' ') + transpose_note(note, notes_asc) first_iterations = False", "range(1, 12): # print(transposed_copy, '|||###########||||', transpose_copy(transposed_copy)) # Transpose the 
transposition transposed_copy = transpose_copy(transposed_copy)", "the transposition transposed_copy = transpose_copy(transposed_copy) # Remove spaces from transposition without_spaces_transposed_copy = ''.join(transposed_copy.split('", "and check substring of original is_copy = False transposed_copy = transpose_copy(copy) without_spaces_transposed_copy =", "transposed_copy = transpose_copy(copy) without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) without_spaces_original = ''.join(original.split(' ')) last_char_of_substr =", "= ['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb',", "'A', 'A#', 'B', 'B#'] # Split notes of copy transpose_copy_notes = '' first_iterations", "1 time up and check substring of original is_copy = False transposed_copy =", "notes): if note == 'E#': return 'F#' if note == 'B#': return 'C#'", "without_spaces_transposed_copy = ''.join(transposed_copy.split(' ')) last_char_of_substr = without_spaces_original[lp(without_spaces_transposed_copy, without_spaces_original)] if without_spaces_transposed_copy in without_spaces_original and", "not a copy check 1 time down if not is_copy: transposed_copy = transpose_copy_down(copy)", "Read the copy copy = input() # Check if copy is substring of", "['C', 'Cb', 'B', 'Bb', 'A', 'Ab', 'G', 'Gb', 'F', 'Fb', 'E', 'Eb', 'D',", "for note in copy_notes.split(' '): transpose_copy_notes += ('' if first_iterations else ' ')", "Transpose copy 12 times and check substring of original for i in range(1,", "'' first_iterations = True for note in copy_notes.split(' '): transpose_copy_notes += ('' if", "the original original = input() # Read the copy copy = input() #", "copy copy = input() # Check if copy is substring of original and", "notes.index(note) if current_index == len(notes) - 1: current_index = -1 return notes[current_index +", "not_flat_or_sharp(char): return char != '#' and char != 'b' def transpose_note(note, notes): if", "if note == 'E#': return 'F#' if note == 'B#': return 
'C#' if", "= notes.index(note) if current_index == len(notes) - 1: current_index = -1 return notes[current_index", "'D', 'Db'] # Split notes of copy transpose_copy_notes = '' first_iterations = True" ]
[ "('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False,", "verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote", "Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc',", "verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall", "verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel(", "fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall", "verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')),", "('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote", "= [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id',", 
"('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress',", "blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True,", "models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),", "models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40,", "null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False,", "('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)),", "models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)),", "'0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name',", "models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', 
models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress',", "blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted',", "('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)),", "'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True,", "models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table':", "import settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations =", "(b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min',", "group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default',", "blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID',", "range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, 
verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group", "verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall_rules', },", "Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', },", "verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall',", "('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50,", "min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol',", "verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center',", "null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True,", "models.DateTimeField(auto_now_add=True, verbose_name='Create 
Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall_rules',", "verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)),", "-*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations", "'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')],", "models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted',", "verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create", "blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')),", "models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True,", "Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, 
verbose_name='Default')), ('create_date',", "coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations from", "('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max',", "options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id',", "blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)),", "null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol',", "null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')),", "('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0,", "[ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128,", "'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)),", "migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL),", 
"models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')),", "('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)),", "choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6',", "migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128,", "('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID',", "null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True,", "null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0',", "max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True,", "from __future__ import unicode_literals from django.db import models, migrations from django.conf import settings", "utf-8 -*- 
from __future__ import unicode_literals from django.db import models, migrations from django.conf", "Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall',", "from django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies =", "('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')),", "from django.conf import settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ]", "id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False,", "('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={", "settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [", "Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')),", "migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', 
models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID',", "] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall", "Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall_rules', }, bases=(models.Model,),", "-*- from __future__ import unicode_literals from django.db import models, migrations from django.conf import", "django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [", "Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True,", "unicode_literals from django.db import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies", "null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10,", "import models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ ('idc',", "('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id',", "verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port", "('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', 
models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')),", "), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules", "null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True,", "('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules',", "bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall", "verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min',", "models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall_rules', }, bases=(models.Model,), ), ]", "verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')),", "name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', 
blank=True)),", "('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall_rules', }, bases=(models.Model,), ),", "models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)),", "choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port", "], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)),", "dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[", "max', blank=True)), ('protocol', models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id", "desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user',", "primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'),", "blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether", "verbose_name='OS 
Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True,", "class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel(", "blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4',", "prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall',", "max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max',", "blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0,", "('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center',", "'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range", "operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')),", 
"models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')),", "verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ],", "blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),", "ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')),", "[ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False,", "('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id',", "migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True,", "('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', 
models.CharField(default=b'ingress',", "'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40,", "models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip", "('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user',", "null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False,", "models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default',", "name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS", "('deleted', models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,),", "models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True, null=True, verbose_name='Ether type')), ('port_range_min', models.IntegerField(default=0, null=True,", "('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), 
(b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4',", "UUID', blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True,", "models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')],", "= [ migrations.CreateModel( name='Firewall', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id',", "models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False,", "models.CharField(max_length=40, null=True, verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix',", "models.BooleanField(default=False, verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ],", "('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table':", "import unicode_literals from django.db import models, migrations from django.conf import settings class 
Migration(migrations.Migration):", "models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[", "models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'),", "django.conf import settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial'), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations", "verbose_name='Protocol', blank=True)), ('remote_group_id', models.CharField(max_length=40, null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255,", "(b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40,", "verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40,", "}, bases=(models.Model,), ), migrations.CreateModel( name='FirewallRules', fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS", "fields=[ ('id', models.AutoField(serialize=False, primary_key=True)), ('firewall_rules_id', models.CharField(max_length=40, null=True, verbose_name='OS Firewall Rules UUID', blank=True)), ('direction',", "max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('deleted', models.BooleanField(default=False, verbose_name='Deleted')), 
('create_date',", "UUID', blank=True)), ('direction', models.CharField(default=b'ingress', choices=[(b'ingress', 'Ingress'), (b'egress', 'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type',", "primary_key=True)), ('name', models.CharField(max_length=128, verbose_name='Firewall Name')), ('firewall_id', models.CharField(max_length=128, null=True, verbose_name='OS Firewall UUID', blank=True)), ('desc',", "__future__ import unicode_literals from django.db import models, migrations from django.conf import settings class", "null=True, verbose_name='remote group id UUID', blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix',", "'Egress')], max_length=10, blank=True, null=True, verbose_name='Direction')), ('ether_type', models.CharField(default=b'IPv4', choices=[(b'IPv4', 'IPv4'), (b'IPv6', 'IPv6')], max_length=40, blank=True,", "models.BooleanField(default=False, verbose_name='Deleted')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={ 'db_table': 'firewall', }, bases=(models.Model,), ),", "type')), ('port_range_min', models.IntegerField(default=0, null=True, verbose_name='Port range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range", "verbose_name='Deleted')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')), ('firewall', models.ForeignKey(to='firewall.Firewall')), ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)), ('user_data_center', models.ForeignKey(to='idc.UserDataCenter')), ], options={", "blank=True)), ('desc', models.CharField(max_length=50, null=True, verbose_name='Firewall desc', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')), ('create_date', models.DateTimeField(auto_now_add=True, verbose_name='Create", "UUID', 
blank=True)), ('remote_ip_prefix', models.CharField(default=b'0.0.0.0/0', max_length=255, null=True, verbose_name='remote ip prefix', blank=True)), ('is_default', models.BooleanField(default=False, verbose_name='Default')),", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models,", "range min', blank=True)), ('port_range_max', models.IntegerField(default=0, null=True, verbose_name='Port range max', blank=True)), ('protocol', models.CharField(max_length=40, null=True,", "models, migrations from django.conf import settings class Migration(migrations.Migration): dependencies = [ ('idc', '0001_initial')," ]
[ "\"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ),", "self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"),", "), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\"", ", \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ),", ", \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\"", ", \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True", "from solver import buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True", ") self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\",", "self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\"", "), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ), False) if", "import buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\"", "test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\"", "solver import buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): 
self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True )", "True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ), False) if __name__", "self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ), False) if __name__ == \"__main__\":", "True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True )", "\"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ), False)", "import unittest from solver import buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\"", "False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" ,", "class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\"", "def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False)", "unittest from solver import buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ),", ") self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ), False) if __name__ ==", "\"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True ) self.assertEqual(buddy_strings(\"aaaaaaabc\", \"aaaaaaacb\"), True )", "\"aaaaaaacb\"), True ) self.assertEqual(buddy_strings(\"\" , \"aa\" ), False) if __name__ == \"__main__\": unittest.main()", "self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) 
self.assertEqual(buddy_strings(\"aa\" ,", "), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ), False) self.assertEqual(buddy_strings(\"aa\" , \"aa\" ), True", "TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" , \"ab\" ),", "buddy_strings class TestSolver(unittest.TestCase): def test_buddy_strings(self): self.assertEqual(buddy_strings(\"ab\" , \"ba\" ), True ) self.assertEqual(buddy_strings(\"ab\" ," ]
[ "pass def test_add_price(self): \"\"\"Test case for add_price Add price for tenant and event", "E501 \"\"\" pass def test_update_price(self): \"\"\"Test case for update_price Update price for an", "def tearDown(self): pass def test_add_price(self): \"\"\"Test case for add_price Add price for tenant", "self.api = BillingPricesApi() # noqa: E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test case", "E501 \"\"\" pass def test_get_price(self): \"\"\"Test case for get_price Get a price. #", "= BillingPricesApi() # noqa: E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test case for", "BillingPricesApi() # noqa: E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test case for add_price", "a price. # noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page", "price. # noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test case for get_price Get", "noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test case for get_price Get a price.", "\"\"\" pass def test_get_price(self): \"\"\"Test case for get_price Get a price. # noqa:", "for delete_price Delete a price. # noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test", "delete_price Delete a price. # noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test case", "SDK MSX SDK client. # noqa: E501 The version of the OpenAPI document:", "pass def test_update_price(self): \"\"\"Test case for update_price Update price for an event type", "for get_prices_page Retrieve a page of prices. # noqa: E501 \"\"\" pass def", "def setUp(self): self.api = BillingPricesApi() # noqa: E501 def tearDown(self): pass def test_add_price(self):", "a page of prices. # noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test case", "type and tenant. 
# noqa: E501 \"\"\" pass if __name__ == '__main__': unittest.main()", "test_update_price(self): \"\"\"Test case for update_price Update price for an event type and tenant.", "for update_price Update price for an event type and tenant. # noqa: E501", "client. # noqa: E501 The version of the OpenAPI document: 1.0.9 Generated by:", "Get a price. # noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case for", "the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from", "an event type and tenant. # noqa: E501 \"\"\" pass if __name__ ==", "The version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest", "of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk", "document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import", "# noqa: E501 The version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech", "E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case for delete_price Delete a price. #", "\"\"\"Test case for get_prices_page Retrieve a page of prices. # noqa: E501 \"\"\"", "event type. # noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case for delete_price", "Add price for tenant and event type. # noqa: E501 \"\"\" pass def", "MSX SDK MSX SDK client. # noqa: E501 The version of the OpenAPI", "\"\"\"Test case for delete_price Delete a price. # noqa: E501 \"\"\" pass def", "pass def test_delete_price(self): \"\"\"Test case for delete_price Delete a price. # noqa: E501", "price for an event type and tenant. 
# noqa: E501 \"\"\" pass if", "BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api", "version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest import", "case for get_prices_page Retrieve a page of prices. # noqa: E501 \"\"\" pass", "noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a page", "type. # noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case for delete_price Delete", "\"\"\" MSX SDK MSX SDK client. # noqa: E501 The version of the", "case for add_price Add price for tenant and event type. # noqa: E501", "of prices. # noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test case for update_price", "event type and tenant. # noqa: E501 \"\"\" pass if __name__ == '__main__':", "OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api", "Generated by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi #", "test_delete_price(self): \"\"\"Test case for delete_price Delete a price. # noqa: E501 \"\"\" pass", "# noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api =", "and event type. 
# noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case for", "E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a page of", "https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501", "noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api = BillingPricesApi()", "unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi", "# noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a", "for get_price Get a price. # noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test", "get_prices_page Retrieve a page of prices. # noqa: E501 \"\"\" pass def test_update_price(self):", "noqa: E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test case for add_price Add price", "1.0.9 Generated by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi", "test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a page of prices. # noqa: E501", "by: https://openapi-generator.tech \"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa:", "<reponame>CiscoDevNet/python-msx-sdk \"\"\" MSX SDK MSX SDK client. # noqa: E501 The version of", "def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a page of prices. 
# noqa:", "# noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test case for update_price Update price", "class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api = BillingPricesApi() # noqa:", "\"\"\"Test case for add_price Add price for tenant and event type. # noqa:", "noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test case for update_price Update price for", "for add_price Add price for tenant and event type. # noqa: E501 \"\"\"", "setUp(self): self.api = BillingPricesApi() # noqa: E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test", "SDK client. # noqa: E501 The version of the OpenAPI document: 1.0.9 Generated", "stubs\"\"\" def setUp(self): self.api = BillingPricesApi() # noqa: E501 def tearDown(self): pass def", "price. # noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve", "import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase):", "\"\"\" pass def test_delete_price(self): \"\"\"Test case for delete_price Delete a price. # noqa:", "get_price Get a price. # noqa: E501 \"\"\" pass def test_get_prices_page(self): \"\"\"Test case", "pass def test_get_price(self): \"\"\"Test case for get_price Get a price. # noqa: E501", "case for update_price Update price for an event type and tenant. # noqa:", "for an event type and tenant. # noqa: E501 \"\"\" pass if __name__", "test_add_price(self): \"\"\"Test case for add_price Add price for tenant and event type. #", "E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test case for add_price Add price for", "\"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api = BillingPricesApi() # noqa: E501 def", "def test_get_price(self): \"\"\"Test case for get_price Get a price. # noqa: E501 \"\"\"", "Retrieve a page of prices. 
# noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test", "test stubs\"\"\" def setUp(self): self.api = BillingPricesApi() # noqa: E501 def tearDown(self): pass", "from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\"", "Update price for an event type and tenant. # noqa: E501 \"\"\" pass", "import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit", "# noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case for delete_price Delete a", "python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def", "# noqa: E501 def tearDown(self): pass def test_add_price(self): \"\"\"Test case for add_price Add", "pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a page of prices. #", "tenant and event type. # noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case", "Delete a price. # noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test case for", "import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self):", "\"\"\"Test case for update_price Update price for an event type and tenant. #", "for tenant and event type. # noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test", "\"\"\"Test case for get_price Get a price. # noqa: E501 \"\"\" pass def", "MSX SDK client. # noqa: E501 The version of the OpenAPI document: 1.0.9", "python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test", "page of prices. 
# noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test case for", "E501 The version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\" import", "unit test stubs\"\"\" def setUp(self): self.api = BillingPricesApi() # noqa: E501 def tearDown(self):", "test_get_price(self): \"\"\"Test case for get_price Get a price. # noqa: E501 \"\"\" pass", "prices. # noqa: E501 \"\"\" pass def test_update_price(self): \"\"\"Test case for update_price Update", "case for get_price Get a price. # noqa: E501 \"\"\" pass def test_get_prices_page(self):", "\"\"\" import unittest import python_msx_sdk from python_msx_sdk.api.billing_prices_api import BillingPricesApi # noqa: E501 class", "update_price Update price for an event type and tenant. # noqa: E501 \"\"\"", "noqa: E501 \"\"\" pass def test_delete_price(self): \"\"\"Test case for delete_price Delete a price.", "def test_update_price(self): \"\"\"Test case for update_price Update price for an event type and", "\"\"\" pass def test_get_prices_page(self): \"\"\"Test case for get_prices_page Retrieve a page of prices.", "def test_add_price(self): \"\"\"Test case for add_price Add price for tenant and event type.", "case for delete_price Delete a price. # noqa: E501 \"\"\" pass def test_get_price(self):", "add_price Add price for tenant and event type. # noqa: E501 \"\"\" pass", "noqa: E501 The version of the OpenAPI document: 1.0.9 Generated by: https://openapi-generator.tech \"\"\"", "def test_delete_price(self): \"\"\"Test case for delete_price Delete a price. # noqa: E501 \"\"\"", "tearDown(self): pass def test_add_price(self): \"\"\"Test case for add_price Add price for tenant and", "a price. 
# noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test case for get_price", "TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api = BillingPricesApi() # noqa: E501", "E501 class TestBillingPricesApi(unittest.TestCase): \"\"\"BillingPricesApi unit test stubs\"\"\" def setUp(self): self.api = BillingPricesApi() #", "# noqa: E501 \"\"\" pass def test_get_price(self): \"\"\"Test case for get_price Get a", "price for tenant and event type. # noqa: E501 \"\"\" pass def test_delete_price(self):", "\"\"\" pass def test_update_price(self): \"\"\"Test case for update_price Update price for an event" ]
[ "[] def __build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) #", "\"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\"", "Trainer(object): \"\"\"Setup training and run for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer,", "and Y negatives samples for complete number RoIs. \"\"\" if self.config.num_rois > 1:", "data generators, shuffle the data and create other data structures. \"\"\" # Randomize", "LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile(", "self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training", "def __update_losses(self, sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses for RPN", "can be found in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show", "and create other data structures. 
\"\"\" # Randomize data random.shuffle(self.all_data) # Set for", "is completed + allowed verbose, then: # print the average number of overlapping", "loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total", "Data generators self.data_gen_train = None self.data_gen_val = None # Input Tensor Regions of", "None # Data generators self.data_gen_train = None self.data_gen_val = None # Input Tensor", "else: pos_samples = [] return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X", "RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding", "truth boxes. Check RPN settings or keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples):", "Leave the negative samples list empty neg_samples = [] if len(pos_samples) > 0:", "self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples", "device the operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure( self,", "print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights can be found in the", "data that it will use for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num", "init time for current epoch # Instance progress bar for display progress in", "written to {}, and can be ' message += 'loaded when testing to", "# Datasets for training, split 80% training and 20% for validation self.train_images =", "self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5))", "use_gpu self.parser = None self.all_data = [] self.classes_count = [] self.class_mapping = 
[]", "this parameter takes the value False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois -", ").tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The", "pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives", "[ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]),", "training process.\"\"\" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path =", "True: try: # If an epoch is completed + allowed verbose, then: #", "Get negatives samples and positive samples (IoU > thresh) neg_samples = np.where(Y1[0, :,", "\" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message =", "RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper = ROIHelpers( self.config,", "training has begun :)\") for epoch_num in range(self.config.num_epochs): start_time = time.time() # init", "self.num_images = len(self.all_data) self.train_images = [s for s in self.all_data if s['imageset'] ==", "('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] ) iter_num", "pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def", "completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num", "return sel_samples def __update_losses(self, sel_samples, 
iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses", "loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance", "data and create other data structures. \"\"\" # Randomize data random.shuffle(self.all_data) # Set", "the average number of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni ==", "print(\"The training has begun :)\") for epoch_num in range(self.config.num_epochs): start_time = time.time() #", "2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples,", "models for Faster R-CNN. self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier)", "# add data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls)", "= Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls,", "from frcnn.utilities.parser import Parser from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training and", "pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True )", "self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If bg was not added, it", "Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num,", "print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the 
best model self.history.save_best_model( self.model_all, self.config.weights_output_path", "Model from keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses import", "# If an epoch is completed + allowed verbose, then: # print the", "optparse import OptionParser import numpy as np import tensorflow as tf from keras", "= np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples", "total_time = time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print the resume of", "message = 'Mean number of bounding boxes from RPN overlapping ground truth boxes:", "dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping", "backend as K from keras.optimizers import Adam, SGD, RMSprop from keras.layers import Input", "neg_samples, pos_samples): \"\"\"Select X positives samples and Y negatives samples for complete number", "= epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios)", "# Get negatives samples and positive samples (IoU > thresh) neg_samples = np.where(Y1[0,", "the detected objects. 
classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for", "final losses after the epochs ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0])", "Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is None:", "config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path,", "boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr))", "self.data_gen_train = None self.data_gen_val = None # Input Tensor Regions of Interest self.roi_input", "{}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle the", "data for training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config,", "True # to log device placement (on which device the operation ran) config_gpu.log_device_placement", "LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error )", "neg_samples = neg_samples[0] else: # Leave the negative samples list empty neg_samples =", "info = [] # add data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes)", "to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the best", "= np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc =", "format X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2", "epoch if self.config.verbose: message = 'Mean number of bounding boxes from RPN overlapping", "Trainer(results_path) 
weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\"", ") iter_num += 1 # If the current epoch is completed if iter_num", "epoch_num in range(self.config.num_epochs): start_time = time.time() # init time for current epoch #", "None # Convolutional Neural Network self.cnn = None # Data generators self.data_gen_train =", "self.model_rpn.train_on_batch(X, Y) # pred with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI", "and positive samples (IoU > thresh) neg_samples = np.where(Y1[0, :, -1] == 1)", "weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as e:", "# init time for current epoch # Instance progress bar for display progress", "from annotate file or create annotate file from dataset. \"\"\" # Instance parser,", "= loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num,", "file or create annotate file from dataset. \"\"\" # Instance parser, recover data", "Update the best loss if the current loss is better. 
if curr_loss <", "loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss,", "train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train =", "neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives samples", "metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error ) # test", "# Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the config data", "# Instance parser, recover data from annotate file or dataset self.parser = Parser(", "info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path", "np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time()", "sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2]", "data from annotate file or create annotate file from dataset. 
\"\"\" # Instance", "def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples) > 0:", "has been written to {}, and can be ' message += 'loaded when", ") # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans", "Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This is a model that", "in the current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr',", "compile models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer", "self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image in", "whether or not the selection is made with replacement (default this parameter takes", "= None self.model_all = None # Training process self.iter_num = 0 self.losses =", "self.config.num_rois), len(self.classes_count) ) # Tensor for image in TensorFlow self.input_shape_image = (None, None,", "= num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length =", "ans # If bg was not added, it will be added to the", "settings or keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives", "the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the config data for training", "config object self.config.class_mapping = self.class_mapping # Show resume from loaded data self.show_info_data() def", "from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training and run for some epochs.\"\"\"", "from 
frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers", "0 self.num_anchors = 0 self.input_shape_image = None self.results_path = results_path # Datasets for", "results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process", "{}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the best model", "division import random import pprint import sys import time import pickle import logging", "Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier,", "of bounding boxes from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message =", "start_time): \"\"\"Update the final losses after the epochs ends.\"\"\" # Average losses loss_rpn_cls", "self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' )", "len(neg_samples) > 0: # Just choose the first one neg_samples = neg_samples[0] else:", "None # Input Tensor Regions of Interest self.roi_input = Input(shape=(None, 4)) # Models", "folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number of overlapping bboxes.\"\"\" total", "R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0 best_loss = np.Inf #", "tensorflow as tf from keras import backend as K from keras.optimizers import Adam,", "= (None, None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from", 
"generators self.data_gen_train = None self.data_gen_val = None # Input Tensor Regions of Interest", "Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count,", "print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count))) # Persistence", "self.roi_input], classifier) # This is a model that holds both the RPN and", "test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots", "loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data that it will use for training.\"\"\"", "self.train_images = [s for s in self.all_data if s['imageset'] == 'trainval'] self.val_images =", "Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self):", "ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi(", "base layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the class", "sys import time import pickle import logging import traceback from optparse import OptionParser", "process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None", "This is a model that holds both the RPN and the classifier... #", "None self.model_all = None # Training process self.iter_num = 0 self.losses = None", "== 1) pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples = self.__validate_samples(", "complete number RoIs. 
\"\"\" if self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois //", "X, X2, Y1, Y2) # Update progress bar in the current epoch progress_bar.update(", "pos_samples = [] return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives", "self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length,", "self.train_images = None self.val_images = None # Convolutional Neural Network self.cnn = None", "np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples))", "for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) #", "built on the base layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will", "(on which device the operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def", "for training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f)", "positives and negatives samples.\"\"\" if len(neg_samples) > 0: # Just choose the first", "+ classifier) # Use to load/save weights for the models. 
self.__load_weights() # Save", "ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls", "base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built on the", "process self.num_images = len(self.all_data) self.train_images = [s for s in self.all_data if s['imageset']", "if curr_loss < best_loss: message = 'Total loss decreased from {} to {},", "= 0 self.num_anchors = 0 self.input_shape_image = None self.results_path = results_path # Datasets", "the negative samples list empty neg_samples = [] if len(pos_samples) > 0: pos_samples", "# Print the resume of the epoch if self.config.verbose: message = 'Mean number", "rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This is a model that holds", "validation self.train_images = None self.val_images = None # Convolutional Neural Network self.cnn =", "completed + allowed verbose, then: # print the average number of overlapping bboxes.", "weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\" # Config file", "neg sample. \"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples", "= float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average number of", "self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0],", "to load/save weights for the models self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier)", "# print the average number of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 =", "with replacement (default this parameter takes the value False). 
\"\"\" selected_neg_samples = np.random.choice(", "self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length", "pos_samples) # Update losses, for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X,", "= Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile(", "= weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate =", "selected_neg_samples else: \"\"\"In the extreme case where num_rois = 1, we pick a", "application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number of overlapping bboxes.\"\"\"", "if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice(", "path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32,", "self.cnn = None # Data generators self.data_gen_train = None self.data_gen_val = None #", "self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config object self.config.class_mapping", "Error ) # test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") #", "boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding boxes from RPN: {}'", "not added, 
it will be added to the image data dictionaries. if 'bg'", "is better. if curr_loss < best_loss: message = 'Total loss decreased from {}", "return best_loss if __name__ == '__main__': results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path", "Randomize data random.shuffle(self.all_data) # Set for training process self.num_images = len(self.all_data) self.train_images =", "print(\"Weights can be found in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self):", "\"\"\" Create optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses", "according to classifier batch training. loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:,", "= [] self.classes_count = [] self.class_mapping = [] self.num_images = 0 self.num_anchors =", "self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier =", "{}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images,", "Y negatives samples for complete number RoIs. \"\"\" if self.config.num_rois > 1: if", "neg_samples[0] else: # Leave the negative samples list empty neg_samples = [] if", "the models. self.__load_weights() # Save the models like a trainable object. 
self.__compile_models() def", "found in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average", "0 break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting", "s in self.all_data if s['imageset'] == 'trainval'] self.val_images = [s for s in", "[] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\" img_input", "model weights.\") print(\"Weights can be found in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\")", "RPN settings or keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and", "samples and positive samples (IoU > thresh) neg_samples = np.where(Y1[0, :, -1] ==", "= loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {} vs current", "loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'},", "> 0: pos_samples = pos_samples[0] else: pos_samples = [] return (neg_samples, pos_samples) def", "info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path = \"training_results/1\"", "determines whether or not the selection is made with replacement (default this parameter", "Update losses, for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1,", "Tensor for image in TensorFlow self.input_shape_image = (None, None, 3) def recover_data( self,", "import logging import traceback from optparse import OptionParser import numpy as np import", "= 
tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set", "pprint import sys import time import pickle import logging import traceback from optparse", "ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2)", "loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {} vs current loss:", "dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If bg was", "len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn = CNN( self.num_anchors,", "and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors)", "+= 'loaded when testing to ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the", "'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data generators", "# Randomize data random.shuffle(self.all_data) # Set for training process self.num_images = len(self.all_data) self.train_images", ") self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length,", "import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import", "logging import traceback from optparse import OptionParser import numpy as np import tensorflow", "Use to load/save weights for the models. 
self.__load_weights() # Save the models like", "= config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config has", "X2, Y1, Y2) # Update progress bar in the current epoch progress_bar.update( iter_num", "# to log device placement (on which device the operation ran) config_gpu.log_device_placement =", "ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If bg was not", "progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: # If", "np.Inf # Start iterative process print(\"The training has begun :)\") for epoch_num in", "run for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config()", "while True: try: # If an epoch is completed + allowed verbose, then:", "= [] if len(pos_samples) > 0: pos_samples = pos_samples[0] else: pos_samples = []", "'Mean number of bounding boxes from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes))", "RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) # Update progress bar in", "samples for complete number RoIs. 
\"\"\" if self.config.num_rois > 1: if len(pos_samples) <", "{}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr", "next(self.data_gen_train) # calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with", "persistence the config data for training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb')", "keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if", "= 'Total loss decreased from {} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss", "detected objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster", "if len(pos_samples) > 0: pos_samples = pos_samples[0] else: pos_samples = [] return (neg_samples,", "classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls +", "we pick a random pos or neg sample. \"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples", "import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import", "for Faster R-CNN self.model_rpn = None self.model_classifier = None self.model_all = None #", "# Define the RPN, built on the base layers. rpn = self.cnn.create_rpn(shared_layers) #", "average number of overlapping bboxes. 
len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length)", "sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme case where num_rois =", "1, self.config.num_epochs)) while True: try: # If an epoch is completed + allowed", "# Instance a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) #", "# test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save", "else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples =", "for epoch_num in range(self.config.num_epochs): start_time = time.time() # init time for current epoch", "// 2, replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False", "replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme case where", ":p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle the data and create other data", "tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters", "the classifier... 
# Used to load/save weights for the models self.model_all = Model([img_input,", "keras.optimizers import Adam, SGD, RMSprop from keras.layers import Input from keras.models import Model", "plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from a", "Generate row for epoch info info = [] # add data to info", "a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois -", "train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0", "grow the memory used on the GPU config_gpu.gpu_options.allow_growth = True # to log", "list empty neg_samples = [] if len(pos_samples) > 0: pos_samples = pos_samples[0] else:", "self.val_images = None # Convolutional Neural Network self.cnn = None # Data generators", "False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples =", "classifier) # Use to load/save weights for the models. 
self.__load_weights() # Save the", "SGD, RMSprop from keras.layers import Input from keras.models import Model from keras.utils import", "per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count))) # Persistence the data", "self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all,", "recover data from annotate file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path )", "# If bg was not added, it will be added to the image", "loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final", "Select samples from positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update", "else: \"\"\"In the extreme case where num_rois = 1, we pick a random", "of overlapping bounding boxes from RPN = {}\" message += \" for {}", "3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update", "Faster R-CNN self.model_rpn = None self.model_classifier = None self.model_all = None # Training", "= True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5,", "def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto()", "(neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples and Y negatives", "= self.model_rpn.train_on_batch(X, Y) # pred with 
RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a", "to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from", "in config object self.config.class_mapping = self.class_mapping # Show resume from loaded data self.show_info_data()", "training process self.num_images = len(self.all_data) self.train_images = [s for s in self.all_data if", ") # Generate row for epoch info info = [] # add data", "bounding boxes from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier", "= [] # add data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc)", "Instance a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert", "# Select samples from positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) #", "80% training and 20% for validation self.train_images = None self.val_images = None #", "self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained", "\"\"\"Show the average number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total)", "pos_samples = pos_samples[0] else: pos_samples = [] return (neg_samples, pos_samples) def __select_samples(self, neg_samples,", "Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val", "Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr()", "-*- from __future__ import division import random 
import pprint import sys import time", ") self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives samples sel_samples =", "create other data structures. \"\"\" # Randomize data random.shuffle(self.all_data) # Set for training", "like a trainable object. self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and compile models.\"\"\"", "= \"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset,", "and negatives samples.\"\"\" if len(neg_samples) > 0: # Just choose the first one", "from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import History class", "0 best_loss = np.Inf # Start iterative process print(\"The training has begun :)\")", "for training process self.num_images = len(self.all_data) self.train_images = [s for s in self.all_data", "# Generate row for epoch info info = [] # add data to", "*= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois),", "# Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' )", "Print the resume of the epoch if self.config.verbose: message = 'Mean number of", "average number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /=", ") self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self):", "sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def 
__update_losses(self, sel_samples, iter_num,", "shuffle the data and create other data structures. \"\"\" # Randomize data random.shuffle(self.all_data)", "that it will use for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes", "Model([img_input, self.roi_input], rpn[:2] + classifier) # Use to load/save weights for the models.", "= self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster R-CNN. self.model_rpn =", "self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final losses", "[s for s in self.all_data if s['imageset'] == 'trainval'] self.val_images = [s for", "Neural Network self.cnn = None # Data generators self.data_gen_train = None self.data_gen_val =", "== self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) #", "{}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights can be found in the keras", "bar for display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num +", "self.all_data if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images)))", "data dictionaries. if 'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping)", "self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from a pretrained", "# Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN,", "weights according to classifier batch training. 
loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]],", "= self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector and RPN self.__update_losses(sel_samples, iter_num,", "by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load", "+ \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path, num_epochs=1 ) trainer.save_config(results_path", "object self.config.class_mapping = self.class_mapping # Show resume from loaded data self.show_info_data() def show_info_data(self):", "= [] # Print the resume of the epoch if self.config.verbose: message =", "from keras import backend as K from keras.optimizers import Adam, SGD, RMSprop from", "message = \"Average number of overlapping bounding boxes from RPN = {}\" message", "\"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\")", "CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image in TensorFlow self.input_shape_image", "for s in self.all_data if s['imageset'] == 'trainval'] self.val_images = [s for s", "training and 20% for validation self.train_images = None self.val_images = None # Convolutional", "on the GPU config_gpu.gpu_options.allow_growth = True # to log device placement (on which", "calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with RPN pred_rpn", "= {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the", 
"__print_average_bbxes(self): \"\"\"Show the average number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes =", "Interest self.roi_input = Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn = None", "Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor =", "the current loss is better. if curr_loss < best_loss: message = 'Total loss", "curr_loss < best_loss: message = 'Total loss decreased from {} to {}, saving", "models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer =", "import random import pprint import sys import time import pickle import logging import", "= None self.all_data = [] self.classes_count = [] self.class_mapping = [] self.num_images =", "on the base layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will assign", "model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for epoch info info =", "from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data that it will use for", "{}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr))", "detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls", "self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error ) # test save summaries self.history.save_summary(self.model_rpn,", "loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr", "self.results_path = results_path # Datasets 
for training, split 80% training and 20% for", "= Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data,", "import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import", "to load/save weights for the models. self.__load_weights() # Save the models like a", "self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This is a", "frcnn.cnn import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history", "1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] )", "s['imageset'] == 'trainval'] self.val_images = [s for s in self.all_data if s['imageset'] ==", "= np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create the", "= self.cnn.build_nn_base(img_input) # Define the RPN, built on the base layers. rpn =", "= np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace parameter determines", ") # Build models for Faster R-CNN. 
self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier =", "pos_samples): \"\"\"Select X positives samples and Y negatives samples for complete number RoIs.", "= curr_loss # Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate", "import tensorflow as tf from keras import backend as K from keras.optimizers import", "= None self.results_path = results_path # Datasets for training, split 80% training and", "for validation self.train_images = None self.val_images = None # Convolutional Neural Network self.cnn", "bar in the current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]),", "= ans # If bg was not added, it will be added to", "from a pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path,", "+ 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss", "Define the RPN, built on the base layers. 
rpn = self.cnn.create_rpn(shared_layers) # Define", "{} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message = \"RPN is", "+ loss_class_cls + loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss)) #", "s in self.all_data if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val", "+ 1)) ] ) iter_num += 1 # If the current epoch is", "'Classifier accuracy for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls))", "some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu =", "__setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() #", "self.all_data, self.classes_count, self.class_mapping = ans # If bg was not added, it will", "models self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier) # Use to load/save weights", "iter_num = 0 break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training", "{}\" message += \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes ==", "// 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2,", "Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:,", "bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose:", "dictionaries. 
if 'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) #", "or create annotate file from dataset. \"\"\" # Instance parser, recover data from", "max_boxes=300 ) # Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True", "the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number of", "generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import", "return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples and Y", "- len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace parameter determines whether or not the", "self.__build_frcnn() # Iterative process iter_num = 0 best_loss = np.Inf # Start iterative", "If bg was not added, it will be added to the image data", "trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path", "time.time() # init time for current epoch # Instance progress bar for display", "rpn[:2] + classifier) # Use to load/save weights for the models. 
self.__load_weights() #", "= time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print the resume of the", "Set for training process self.num_images = len(self.all_data) self.train_images = [s for s in", "process print(\"The training has begun :)\") for epoch_num in range(self.config.num_epochs): start_time = time.time()", "hyperparameters before the training process.\"\"\" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois =", "best_loss if __name__ == '__main__': results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path =", "recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file or create annotate", "Convolutional Neural Network self.cnn = None # Data generators self.data_gen_train = None self.data_gen_val", "2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False", "= Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count,", "self.config.num_epochs)) while True: try: # If an epoch is completed + allowed verbose,", "break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p')", "pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9,", "keras.models import Model from keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils from", "weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" )", "convolutional neural network 
self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor", "import traceback from optparse import OptionParser import numpy as np import tensorflow as", "= generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: # If an", "\"\"\"Load weights from a pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True)", "neural network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for", "self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster R-CNN. self.model_rpn = Model(img_input,", "# Update losses, for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2,", "-1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) #", "self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't", "loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses for RPN and classifier.\"\"\" # Calculate", "info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time)", "info info = [] # add data to info list info.append(epoch_num + 1)", "config_f) message = 'Config has been written to {}, and can be '", ") except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights", "of 
overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor", "selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples = np.random.choice(", "+ 1, self.config.num_epochs)) while True: try: # If an epoch is completed +", "Used to load/save weights for the models self.model_all = Model([img_input, self.roi_input], rpn[:2] +", "self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 )", "logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the memory used on", "first one neg_samples = neg_samples[0] else: # Leave the negative samples list empty", "RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts", "self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc", "= None self.model_classifier = None self.model_all = None # Training process self.iter_num =", "2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self,", "placement (on which device the operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu)", "= len(self.class_mapping) # Mapping persistence in config object self.config.class_mapping = self.class_mapping # Show", "of the epoch if self.config.verbose: message = 'Mean number of bounding boxes from", "\"\"\" # Instance parser, recover data from annotate file or dataset self.parser =", "self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn = CNN( self.num_anchors, 
(self.roi_input,", "will be added to the image data dictionaries. if 'bg' not in self.classes_count:", "the average number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes", "import numpy as np import tensorflow as tf from keras import backend as", "= None self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System and session setup", "level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the memory used on the", "selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else:", "img_data = next(self.data_gen_train) # calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) #", "1, we pick a random pos or neg sample. \"\"\" selected_pos_samples = pos_samples.tolist()", "print the average number of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni", "np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples,", "def __load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path))", "= None # Training process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor =", "the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0 best_loss =", "e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data", "print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message = \"RPN is not producing bounding", "start_time ) iter_num = 0 break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e))", "as tf from keras import backend 
as K from keras.optimizers import Adam, SGD,", "epoch info info = [] # add data to info list info.append(epoch_num +", "float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print", "neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0, :, -1] ==", "open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config has been written to", "annotate file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data", "self.num_images = 0 self.num_anchors = 0 self.input_shape_image = None self.results_path = results_path #", "to {}, and can be ' message += 'loaded when testing to ensure", "self.classes_count = [] self.class_mapping = [] self.num_images = 0 self.num_anchors = 0 self.input_shape_image", "self.all_data = [] self.classes_count = [] self.class_mapping = [] self.num_images = 0 self.num_anchors", "self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier,", "None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file", "loss='mae' # Mean Absolute Error ) # test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier,", "samples.\"\"\" if len(neg_samples) > 0: # Just choose the first one neg_samples =", "from keras.optimizers import Adam, SGD, RMSprop from keras.layers import Input from keras.models import", "total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time", "self.config.config_output_filename = config_output_filename 
with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config", "roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI roi", "sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch =", "previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message = \"RPN is not", "self.parser = None self.all_data = [] self.classes_count = [] self.class_mapping = [] self.num_images", "= loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] =", "\"\"\"Show data that it will use for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count)", "# Define classifier, it will assign the class of the detected objects. classifier", "\"\"\"In the extreme case where num_rois = 1, we pick a random pos", "save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from", "= pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples", "[Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1]", "optimizer='sgd', loss='mae' # Mean Absolute Error ) # test save summaries self.history.save_summary(self.model_rpn, \"rpn\")", "the RPN, built on the base layers. 
rpn = self.cnn.create_rpn(shared_layers) # Define classifier,", "setup self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu", "\"\"\"Setup training and run for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__()", "message = 'Classifier accuracy for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN", "= selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme case where num_rois = 1,", "# System and session setup self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000)", "split 80% training and 20% for validation self.train_images = None self.val_images = None", "replacement (default this parameter takes the value False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples,", "\"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0 best_loss", "= np.Inf # Start iterative process print(\"The training has begun :)\") for epoch_num", "self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0 best_loss = np.Inf # Start", "of Interest self.roi_input = Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn =", "and can be ' message += 'loaded when testing to ensure correct results'", "< best_loss: message = 'Total loss decreased from {} to {}, saving weights'", "message = 'Total loss decreased from {} to {}, saving weights' print(message.format(best_loss, curr_loss))", "https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor)", "made with replacement (default this parameter takes the value False). 
\"\"\" selected_neg_samples =", "will assign the class of the detected objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count)", "= self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0 break except Exception as", "time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print the resume of the epoch", "print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr +", "for display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1,", "TensorFlow self.input_shape_image = (None, None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover", "'loaded when testing to ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster", "range(self.config.num_epochs): start_time = time.time() # init time for current epoch # Instance progress", "best_loss, start_time): \"\"\"Update the final losses after the epochs ends.\"\"\" # Average losses", "has begun :)\") for epoch_num in range(self.config.num_epochs): start_time = time.time() # init time", "self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser = None self.all_data = []", "\"\"\"Create the unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the base", "def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file or create", "negatives samples.\"\"\" if len(neg_samples) > 0: # Just choose the first one neg_samples", "import OptionParser import numpy as np import tensorflow as tf from keras import", "'trainval'] self.val_images = [s for s in self.all_data if s['imageset'] == 'test'] print('Num", "load/save weights 
for the models. self.__load_weights() # Save the models like a trainable", "replace=False ).tolist() except: \"\"\"The replace parameter determines whether or not the selection is", "verbose, then: # print the average number of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor)", "iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses for RPN and classifier.\"\"\" #", "if __name__ == '__main__': results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\"", "import pprint import sys import time import pickle import logging import traceback from", "keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number of overlapping", "losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2])", "trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path, num_epochs=1 ) trainer.save_config(results_path + \"/config.pickle\") trainer.train()", "from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss", "a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In", "self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive samples (IoU > thresh) neg_samples", "pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples", "self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image in TensorFlow self.input_shape_image =", 
"import Parser from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training and run for", "(IoU > thresh) neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0,", "self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num,", "= 'Config has been written to {}, and can be ' message +=", "the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for epoch info", "sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] =", "and run for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config =", "data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\"", "LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import Config", "print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss", "(self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image in TensorFlow self.input_shape_image = (None,", "random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update", "== 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data", "test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights", "RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred 
with RPN pred_rpn = self.model_rpn.predict_on_batch(X) #", ") self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae'", "the training process.\"\"\" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path", "config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the memory used on the GPU config_gpu.gpu_options.allow_growth", "try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace", "loss: {} vs current loss: {}'.format(best_loss, curr_loss)) # Update the best loss if", "from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from", "= neg_samples[0] else: # Leave the negative samples list empty neg_samples = []", "If an epoch is completed + allowed verbose, then: # print the average", "the unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the base network", "shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built on the base layers. 
rpn", "bg) = {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence", "to (x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping )", "from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as e: print('Exception:", "float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average number of overlapping", "= self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the class of the detected", "None self.all_data = [] self.classes_count = [] self.class_mapping = [] self.num_images = 0", "= np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples )", "# Data generators self.data_gen_train = None self.data_gen_val = None # Input Tensor Regions", "for complete number RoIs. \"\"\" if self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois", "for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu", "generate_annotate=False): \"\"\"Recover data from annotate file or create annotate file from dataset. 
\"\"\"", "1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4])", "= np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time =", "# Just choose the first one neg_samples = neg_samples[0] else: # Leave the", "be ' message += 'loaded when testing to ensure correct results' print(message.format(config_output_filename)) def", "results_path # Datasets for training, split 80% training and 20% for validation self.train_images", "utf-8 -*- from __future__ import division import random import pprint import sys import", "= float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch = [] #", "keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator from", "print('Num classes (including bg) = {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self,", "continue # Get negatives samples and positive samples (IoU > thresh) neg_samples =", "= pos_samples[0] else: pos_samples = [] return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples):", "__update_losses(self, sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses for RPN and", "== '__main__': results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset =", "frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training and run for some epochs.\"\"\" def", "process.\"\"\" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path", "saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the 
best model self.history.save_best_model(", "for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count)))", "# dynamically grow the memory used on the GPU config_gpu.gpu_options.allow_growth = True #", "{}'.format(best_loss, curr_loss)) # Update the best loss if the current loss is better.", "num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length", "the current epoch is completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num,", "producing bounding boxes that overlap the \" message += \"ground truth boxes. Check", "self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc loss for RPN loss_rpn =", "use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser = None self.all_data", "True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32,", "config data for training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as config_f:", "> 1: if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples", "def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final losses after the epochs ends.\"\"\"", "{}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the config", "Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If", 
"OptionParser import numpy as np import tensorflow as tf from keras import backend", "or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans", "= sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch", "try: # If an epoch is completed + allowed verbose, then: # print", "boxes. Check RPN settings or keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format", "2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] ) iter_num += 1", "print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss)) # Update the best loss", "info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path =", "loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss)) # Update the best", "is not producing bounding boxes that overlap the \" message += \"ground truth", "save_config(self, config_output_filename): \"\"\"Do persistence the config data for training process.\"\"\" self.config.config_output_filename = config_output_filename", "the operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation,", "operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois,", "= Model([img_input, self.roi_input], classifier) # This is a model that holds both the", "trainable object. 
self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and compile models.\"\"\" learning_rate =", "/= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average number of overlapping bounding boxes", "1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4]", "mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average number of overlapping bounding", "a model that holds both the RPN and the classifier... # Used to", "def __print_average_bbxes(self): \"\"\"Show the average number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes", "the selection is made with replacement (default this parameter takes the value False).", "Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN", "self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] ) iter_num +=", "self.config = Config() self.config.use_gpu = use_gpu self.parser = None self.all_data = [] self.classes_count", "\"\"\"Select X positives samples and Y negatives samples for complete number RoIs. \"\"\"", "None # Training process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor = None", "self.config.class_mapping = self.class_mapping # Show resume from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show", "__init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser =", "self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs", "structures. 
\"\"\" # Randomize data random.shuffle(self.all_data) # Set for training process self.num_images =", "{}, and can be ' message += 'loaded when testing to ensure correct", "epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\" # Config file self.config.data_augmentation =", "samples and Y negatives samples for complete number RoIs. \"\"\" if self.config.num_rois >", "empty neg_samples = [] if len(pos_samples) > 0: pos_samples = pos_samples[0] else: pos_samples", "len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average number of overlapping bounding boxes from", "is made with replacement (default this parameter takes the value False). \"\"\" selected_neg_samples", "training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message", "data self.show_info_data() def show_info_data(self): \"\"\"Show data that it will use for training.\"\"\" print('Training", "= self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier", "# Instance progress bar for display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length)", "Regions of Interest self.roi_input = Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn", "s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create", "number of bounding boxes from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message", "__compile_models(self): \"\"\" Create optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count)", "num_rois = 1, we pick a random pos or neg sample. 
\"\"\" selected_pos_samples", "# Iterative process iter_num = 0 best_loss = np.Inf # Start iterative process", "to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr)", "losses = LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[", "= [] self.class_mapping = [] self.num_images = 0 self.num_anchors = 0 self.input_shape_image =", "configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the", "{}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {}", "loss is better. if curr_loss < best_loss: message = 'Total loss decreased from", "info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__", "in TensorFlow self.input_shape_image = (None, None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False):", "to the image data dictionaries. 
if 'bg' not in self.classes_count: self.classes_count['bg'] = 0", "number of overlapping bounding boxes from RPN = {}\" message += \" for", "from {} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save", "Input Tensor Regions of Interest self.roi_input = Input(shape=(None, 4)) # Models for Faster", "pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples) > 0: # Just choose", "pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1,", "= [] def __build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image)", "Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI", "= next(self.data_gen_train) # calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred", "X, Y, img_data = next(self.data_gen_train) # calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X,", "None self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System and session setup self.__setup()", "= None # Convolutional Neural Network self.cnn = None # Data generators self.data_gen_train", "\"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False,", "CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses", "= loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final losses after the", "positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for class", "the models like a trainable object. 
self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and", "and 20% for validation self.train_images = None self.val_images = None # Convolutional Neural", "RPN and the classifier... # Used to load/save weights for the models self.model_all", "# calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with RPN", "roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to", "the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built on", "X, X2, Y1, Y2): \"\"\"Update losses for RPN and classifier.\"\"\" # Calculate weights", "Just choose the first one neg_samples = neg_samples[0] else: # Leave the negative", "Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training", "self.config.verbose: message = 'Mean number of bounding boxes from RPN overlapping ground truth", "self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System and session setup self.__setup() def", "self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file or create annotate file", "trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\",", "choose the first one neg_samples = neg_samples[0] else: # Leave the negative samples", "from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for", "History(results_path) # System and session setup self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\"", "1 # If the current epoch is completed if iter_num == self.config.epoch_length: best_loss", "import pickle import logging import traceback from optparse 
import OptionParser import numpy as", "iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message = \"RPN is not producing", "Update progress bar in the current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls',", "self.rpn_accuracy_for_epoch = [] # Print the resume of the epoch if self.config.verbose: message", "and classifier.\"\"\" # Calculate weights according to classifier batch training. loss_class = self.model_classifier.train_on_batch(", "loss_class_cls + loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss)) # Update", "unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the base network (VGG16)", "network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built on the base", "neg_samples = [] if len(pos_samples) > 0: pos_samples = pos_samples[0] else: pos_samples =", "self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def", "If the current epoch is completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch(", "dynamically grow the memory used on the GPU config_gpu.gpu_options.allow_growth = True # to", "pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2,", "self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) # Update progress bar in the", "overlap the \" message += \"ground truth boxes. 
Check RPN settings or keep", "mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch = []", "'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence", "the \" message += \"ground truth boxes. Check RPN settings or keep training.\"", "Parser from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training and run for some", "= len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn = CNN(", "of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1", "epoch_num, best_loss, start_time): \"\"\"Update the final losses after the epochs ends.\"\"\" # Average", "time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss:", "when testing to ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\"", "val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config,", "self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else:", "pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples", "= None self.history = History(results_path) # System and session setup self.__setup() def __setup(self):", "mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average number", "epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = 
Config() self.config.use_gpu = use_gpu", "print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed", "annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file or create annotate file from dataset.", "self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] ) iter_num += 1 # If", "self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create the unified model Faster", "overlapping bounding boxes from RPN = {}\" message += \" for {} previous", "2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes", "len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count)", "\"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples) > 0: # Just choose the", "num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate)", "info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path = \"training_results/1\" trainer", "model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers", "Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn = None self.model_classifier = None", "RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time:", "if 'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = 
len(self.class_mapping) # Mapping", "and session setup self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)", "self.model_all, self.config.weights_output_path ) # Generate row for epoch info info = [] #", "np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois", "import time import pickle import logging import traceback from optparse import OptionParser import", "else: sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn, X, X2,", "= Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This is a model", "current epoch is completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss,", "+= \"ground truth boxes. Check RPN settings or keep training.\" print(message) def __validate_samples(self,", "parser, recover data from annotate file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path", "losses after the epochs ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr", "tf from keras import backend as K from keras.optimizers import Adam, SGD, RMSprop", "data structures. \"\"\" # Randomize data random.shuffle(self.all_data) # Set for training process self.num_images", "models like a trainable object. self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and compile", "added, it will be added to the image data dictionaries. if 'bg' not", "both the RPN and the classifier... 
# Used to load/save weights for the", "{}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier:", "0: # Just choose the first one neg_samples = neg_samples[0] else: # Leave", "self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu =", "LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr()", "weights from a pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights(", "better. if curr_loss < best_loss: message = 'Total loss decreased from {} to", "CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import History", "pick a random pos or neg sample. 
\"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples =", "pickle import logging import traceback from optparse import OptionParser import numpy as np", "for s in self.all_data if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num", "self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time):", "Iterative process iter_num = 0 best_loss = np.Inf # Start iterative process print(\"The", "[] return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples and", "loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2]", "# Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path", "keras import backend as K from keras.optimizers import Adam, SGD, RMSprop from keras.layers", "annotate_path=annotate_path ) # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping =", "Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the", "and the classifier... 
# Used to load/save weights for the models self.model_all =", "(including bg) = {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do", "loss decreased from {} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss", "#traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data generators,", "== 0: message = \"RPN is not producing bounding boxes that overlap the", "RPN = {}\" message += \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if", "\" message += \"ground truth boxes. Check RPN settings or keep training.\" print(message)", "self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the class of the detected objects.", "info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__':", ":, -1] == 1) pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples", "the first one neg_samples = neg_samples[0] else: # Leave the negative samples list", "= roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue", "[s for s in self.all_data if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images)))", "= ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI roi =", "0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3])", "classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster R-CNN. 
self.model_rpn", "self.class_mapping # Show resume from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data that", "from annotate file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get", "self.all_data if s['imageset'] == 'trainval'] self.val_images = [s for s in self.all_data if", "model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except", "= [] self.num_images = 0 self.num_anchors = 0 self.input_shape_image = None self.results_path =", "info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return", "__load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path,", "self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0 break except Exception as e:", "models. self.__load_weights() # Save the models like a trainable object. 
self.__compile_models() def __compile_models(self):", "best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for epoch info info", "total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message", "' message += 'loaded when testing to ensure correct results' print(message.format(config_output_filename)) def train(self):", "self.history = History(results_path) # System and session setup self.__setup() def __setup(self): \"\"\"System and", "of the detected objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models", "bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression:", "len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace parameter determines whether or not the selection", "in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config", "RMSprop from keras.layers import Input from keras.models import Model from keras.utils import generic_utils", "def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before", "classifier... 
# Used to load/save weights for the models self.model_all = Model([img_input, self.roi_input],", "epoch is completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time", "if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc loss", "# Convolutional Neural Network self.cnn = None # Data generators self.data_gen_train = None", "Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0)", "selection is made with replacement (default this parameter takes the value False). \"\"\"", "self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config object self.config.class_mapping = self.class_mapping #", "= 0 self.input_shape_image = None self.results_path = results_path # Datasets for training, split", "< self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois", "file from dataset. \"\"\" # Instance parser, recover data from annotate file or", "roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue #", "self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn,", "overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor =", "config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config has been", "sample. 
\"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples =", "config_f: pickle.dump(self.config, config_f) message = 'Config has been written to {}, and can", "\"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self):", "best_loss = np.Inf # Start iterative process print(\"The training has begun :)\") for", "num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors", "dataset. \"\"\" # Instance parser, recover data from annotate file or dataset self.parser", "best_loss, start_time ) iter_num = 0 break except Exception as e: #traceback.print_exc() print('Exception:", "from dataset. \"\"\" # Instance parser, recover data from annotate file or dataset", "weights for the models. self.__load_weights() # Save the models like a trainable object.", "create annotate file from dataset. \"\"\" # Instance parser, recover data from annotate", "batch training. 
loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:,", "pos_samples[0] else: pos_samples = [] return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select", "the GPU config_gpu.gpu_options.allow_growth = True # to log device placement (on which device", "the epochs ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:,", "LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, )", "if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive", "= num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales)", "= (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data =", "__validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples) > 0: #", "holds both the RPN and the classifier... 
# Used to load/save weights for", "= roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h)", "self.model_all = None # Training process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor", "None self.history = History(results_path) # System and session setup self.__setup() def __setup(self): \"\"\"System", "len(self.classes_count) ) # Tensor for image in TensorFlow self.input_shape_image = (None, None, 3)", "\"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically", "# Mapping persistence in config object self.config.class_mapping = self.class_mapping # Show resume from", "= CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image in TensorFlow", "+= 1 # If the current epoch is completed if iter_num == self.config.epoch_length:", "negative samples list empty neg_samples = [] if len(pos_samples) > 0: pos_samples =", "resume of the epoch if self.config.verbose: message = 'Mean number of bounding boxes", "0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config object self.config.class_mapping = self.class_mapping", "import sys import time import pickle import logging import traceback from optparse import", "not the selection is made with replacement (default this parameter takes the value", "CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = []", "the final losses after the epochs ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:,", "= np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr =", "GPU config_gpu.gpu_options.allow_growth = True # to log device 
placement (on which device the", "in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True:", "Config() self.config.use_gpu = use_gpu self.parser = None self.all_data = [] self.classes_count = []", "selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples)", "(VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built on the base layers.", "4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final losses after", "Training process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch =", "== 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select", "np import tensorflow as tf from keras import backend as K from keras.optimizers", "# Training process self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch", "try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception", "after the epochs ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr =", "Datasets for training, split 80% training and 20% for validation self.train_images = None", "# Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for", "from keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator", "len(self.classes_count) losses = 
LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer,", "None self.data_gen_val = None # Input Tensor Regions of Interest self.roi_input = Input(shape=(None,", "pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives samples sel_samples", "[] message = \"Average number of overlapping bounding boxes from RPN = {}\"", "(default this parameter takes the value False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois", "Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou(", "dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans =", "Network self.cnn = None # Data generators self.data_gen_train = None self.data_gen_val = None", "pred with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper =", "- start_time self.rpn_accuracy_for_epoch = [] # Print the resume of the epoch if", "self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate #", "sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses for", "('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch',", "def __compile_models(self): \"\"\" Create optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes =", "class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) # Update", "5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): 
\"\"\"Create the unified model", "it will assign the class of the detected objects. classifier = self.cnn.build_classifier( shared_layers,", "to ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn()", "print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss", "4)) # Models for Faster R-CNN self.model_rpn = None self.model_classifier = None self.model_all", "{}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding boxes from RPN: {}' print(message.format(class_acc))", "parameter takes the value False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples),", "print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num", "3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file or", "be found in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the", "show_info_data(self): \"\"\"Show data that it will use for training.\"\"\" print('Training images per class:')", "training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count))) #", "ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser", "np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace parameter determines whether", "iter_num, loss_rpn, X, X2, Y1, Y2) # Update 
progress bar in the current", "sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5):", "loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final losses after the epochs", "from __future__ import division import random import pprint import sys import time import", "keras.layers import Input from keras.models import Model from keras.utils import generic_utils from frcnn.data_generator", "self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ],", "[X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0]", "Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights can be", "if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the memory used on the GPU", "optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes):", "exiting :p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle the data and create other", "self.show_info_data() def show_info_data(self): \"\"\"Show data that it will use for training.\"\"\" print('Training images", "pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples and Y negatives samples", "loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) 
self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute", "img_input = Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) #", "if self.config.verbose: message = 'Mean number of bounding boxes from RPN overlapping ground", "import History class Trainer(object): \"\"\"Setup training and run for some epochs.\"\"\" def __init__(self,", "the RPN and the classifier... # Used to load/save weights for the models", "self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images,", "Show resume from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data that it will", "[] if len(pos_samples) > 0: pos_samples = pos_samples[0] else: pos_samples = [] return", "replace parameter determines whether or not the selection is made with replacement (default", "\"\"\"Update the final losses after the epochs ends.\"\"\" # Average losses loss_rpn_cls =", "# This is a model that holds both the RPN and the classifier...", "Save the models like a trainable object. self.__compile_models() def __compile_models(self): \"\"\" Create optimizers", ") trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path, num_epochs=1 ) trainer.save_config(results_path + \"/config.pickle\")", "'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error ) # test save", "from keras.models import Model from keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils", "boxes that overlap the \" message += \"ground truth boxes. 
Check RPN settings", "ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to", "load pretrained model weights.\") print(\"Weights can be found in the keras application folder", "self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives samples sel_samples = self.__select_samples(neg_samples,", "data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs", "class of the detected objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build", "save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn,", "list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info)", "Calculate weights according to classifier batch training. 
loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples,", "sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2): \"\"\"Update losses for RPN and classifier.\"\"\"", "= Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path +", "self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc loss for RPN loss_rpn", "{} vs current loss: {}'.format(best_loss, curr_loss)) # Update the best loss if the", "= np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total)", "load/save weights for the models self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier) #", "import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup", "self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *=", "= self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] )", "a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace parameter determines whether or", "selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme case where num_rois = 1, we", "= self.class_mapping # Show resume from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data", "the best loss if the current loss is better. 
if curr_loss < best_loss:", "(x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping", "network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) # Tensor for image", "data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val =", "Y2): \"\"\"Update losses for RPN and classifier.\"\"\" # Calculate weights according to classifier", "self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = []", "best_loss = curr_loss # Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) #", "cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc loss for", "('epoch', int(epoch_num + 1)) ] ) iter_num += 1 # If the current", "the value False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist()", "message += \"ground truth boxes. 
Check RPN settings or keep training.\" print(message) def", "as K from keras.optimizers import Adam, SGD, RMSprop from keras.layers import Input from", "def __build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define", "samples from positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses,", "len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print the resume", "loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3]", "weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate", "def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples and Y negatives samples for", "generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: # If an epoch", "learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate)", "Absolute Error ) # test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\")", "self.class_mapping = ans # If bg was not added, it will be added", "self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path = \"training_results/1\" trainer = Trainer(results_path)", "takes the value False). 
\"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True", "= None # Data generators self.data_gen_train = None self.data_gen_val = None # Input", "def save_config(self, config_output_filename): \"\"\"Do persistence the config data for training process.\"\"\" self.config.config_output_filename =", "assign the class of the detected objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) )", "process iter_num = 0 best_loss = np.Inf # Start iterative process print(\"The training", "\"\"\"Set hyperparameters before the training process.\"\"\" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois", "data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the config data for training process.\"\"\"", "R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input)", "= [s for s in self.all_data if s['imageset'] == 'test'] print('Num train samples", "if s['imageset'] == 'trainval'] self.val_images = [s for s in self.all_data if s['imageset']", "self.class_mapping = [] self.num_images = 0 self.num_anchors = 0 self.input_shape_image = None self.results_path", "num_classes=len(self.classes_count) ) # Build models for Faster R-CNN. 
self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier", "epoch_num, best_loss, start_time ) iter_num = 0 break except Exception as e: #traceback.print_exc()", "self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history = History(results_path) #", "0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples", "frcnn.utilities.parser import Parser from frcnn.utilities.history import History class Trainer(object): \"\"\"Setup training and run", "(len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train)", "learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural", "0: message = \"RPN is not producing bounding boxes that overlap the \"", "as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize", "3]), ('epoch', int(epoch_num + 1)) ] ) iter_num += 1 # If the", "regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls +", "1) pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples,", "neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from", "message += \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0:", "class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = 
{}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count)", "from frcnn.cnn import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from", "self.config.epoch_length)) if mean_overlapping_bboxes == 0: message = \"RPN is not producing bounding boxes", "= 1, we pick a random pos or neg sample. \"\"\" selected_pos_samples =", "ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding boxes from", "epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: #", "training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples)", "self.config.weights_input_path, by_name=True ) except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model", "positive samples (IoU > thresh) neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples", "= True # to log device placement (on which device the operation ran)", "def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num =", "image data dictionaries. 
if 'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] =", "Instance convolutional neural network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) ) #", "{}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: # If an epoch is completed", "for the models self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier) # Use to", "\"\"\"The replace parameter determines whether or not the selection is made with replacement", "in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number", "bounding boxes from RPN = {}\" message += \" for {} previous iteration(s).\"", "by_name=True ) except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\")", "start_time = time.time() # init time for current epoch # Instance progress bar", "import Model from keras.utils import generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses", "frcnn.utilities.config import Config from frcnn.utilities.parser import Parser from frcnn.utilities.history import History class Trainer(object):", "from keras.layers import Input from keras.models import Model from keras.utils import generic_utils from", "config_output_filename): \"\"\"Do persistence the config data for training process.\"\"\" self.config.config_output_filename = config_output_filename with", "] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd',", "[] # Print the resume of the epoch if self.config.verbose: message = 'Mean", "# Calculate weights according to classifier batch training. 
loss_class = self.model_classifier.train_on_batch( [X, X2[:,", "self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\" try: print('Loading weights", "progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num,", "loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with RPN pred_rpn =", "System and session setup self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr,", "session setup self.__setup() def __setup(self): \"\"\"System and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu):", "= len(self.all_data) self.train_images = [s for s in self.all_data if s['imageset'] == 'trainval']", "to log device placement (on which device the operation ran) config_gpu.log_device_placement = True", "classifier batch training. loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :],", "shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster R-CNN. self.model_rpn = Model(img_input, rpn[:2])", "Input from keras.models import Model from keras.utils import generic_utils from frcnn.data_generator import Metrics,", "self.classes_count, self.class_mapping = ans # If bg was not added, it will be", "losses for RPN and classifier.\"\"\" # Calculate weights according to classifier batch training.", "-1] == 1) pos_samples = np.where(Y1[0, :, -1] == 0) neg_samples, pos_samples =", "the class of the detected objects. 
classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) #", "# Tensor for image in TensorFlow self.input_shape_image = (None, None, 3) def recover_data(", "continue print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle the data", "that overlap the \" message += \"ground truth boxes. Check RPN settings or", "loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]]", "+ loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss,", "{}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr", "(x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if", "info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ == '__main__': results_path = \"training_results/1\" trainer =", "cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y, img_data", "= \"Average number of overlapping bounding boxes from RPN = {}\" message +=", "int(epoch_num + 1)) ] ) iter_num += 1 # If the current epoch", "be added to the image data dictionaries. 
if 'bg' not in self.classes_count: self.classes_count['bg']", "import division import random import pprint import sys import time import pickle import", "self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate", "None self.model_classifier = None self.model_all = None # Training process self.iter_num = 0", "= pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try:", "len(pos_samples) < self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples,", "training and run for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config", "self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the config data for training process.\"\"\" self.config.config_output_filename", "is a model that holds both the RPN and the classifier... # Used", "optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' # Mean", "1: if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples =", "__build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the", ":]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num,", "= Model([img_input, self.roi_input], rpn[:2] + classifier) # Use to load/save weights for the", "object. 
self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate", "classifier, it will assign the class of the detected objects. classifier = self.cnn.build_classifier(", "__future__ import division import random import pprint import sys import time import pickle", "import Adam, SGD, RMSprop from keras.layers import Input from keras.models import Model from", "# Models for Faster R-CNN self.model_rpn = None self.model_classifier = None self.model_all =", "and session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow", "'__main__': results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\"", "def __init__(self, results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser", "samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector and RPN", "generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt(", ":, -1] == 0) neg_samples, pos_samples = self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples)))", "= LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(),", "Create optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses =", "# Use to load/save 
weights for the models. self.__load_weights() # Save the models", "print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train", "loss_rpn, X, X2, Y1, Y2) # Update progress bar in the current epoch", "\"RPN is not producing bounding boxes that overlap the \" message += \"ground", "for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN", "for RPN and classifier.\"\"\" # Calculate weights according to classifier batch training. loss_class", "K from keras.optimizers import Adam, SGD, RMSprop from keras.layers import Input from keras.models", "= 0 self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history =", "\"all\") def __load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\" try: print('Loading weights from", "\"\"\" # Randomize data random.shuffle(self.all_data) # Set for training process self.num_images = len(self.all_data)", "self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier) # Use to load/save weights for", "self.model_rpn = None self.model_classifier = None self.model_all = None # Training process self.iter_num", "'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config has been written to {},", "print('Num val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count,", "{}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss", ") # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious", "sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = 
tf.compat.v1.ConfigProto() # dynamically grow the memory used", "sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message = \"Average", "= results_path # Datasets for training, split 80% training and 20% for validation", "pos or neg sample. \"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0,", "it will be added to the image data dictionaries. if 'bg' not in", "Mapping persistence in config object self.config.class_mapping = self.class_mapping # Show resume from loaded", "len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme case", "classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression:", "= [s for s in self.all_data if s['imageset'] == 'trainval'] self.val_images = [s", ") if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and", "as np import tensorflow as tf from keras import backend as K from", "self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num,", "process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message =", "X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive samples", "= weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer", "= random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, 
iter_num, loss_rpn, X, X2, Y1, Y2):", ") self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error ) # test save summaries", "], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error ) #", "np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) /", "= History(results_path) # System and session setup self.__setup() def __setup(self): \"\"\"System and session,", "memory used on the GPU config_gpu.gpu_options.allow_growth = True # to log device placement", "+ selected_neg_samples else: \"\"\"In the extreme case where num_rois = 1, we pick", "= \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure(", "Tensor Regions of Interest self.roi_input = Input(shape=(None, 4)) # Models for Faster R-CNN", "Models for Faster R-CNN self.model_rpn = None self.model_classifier = None self.model_all = None", "# Mean Absolute Error ) # test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\")", "= Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define", "= None self.data_gen_val = None # Input Tensor Regions of Interest self.roi_input =", "len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes()", "optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes = len(self.classes_count) losses = LossesCalculator(num_classes,", "self.iter_num = 0 self.losses = None self.rpn_accuracy_rpn_monitor = None 
self.rpn_accuracy_for_epoch = None self.history", "Start iterative process print(\"The training has begun :)\") for epoch_num in range(self.config.num_epochs): start_time", "RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector", "1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if", "self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples)", "thresh) neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0, :, -1]", "samples (IoU > thresh) neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples =", "mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses =", "images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) = {}'.format(len(self.classes_count))) # Persistence the", "selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except: \"\"\"The replace parameter", "except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def", "sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector and RPN self.__update_losses(sel_samples,", "results_path, use_gpu=False): super(Trainer, self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser = None", "Build models for Faster R-CNN. 
self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input],", "sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn, X, X2, Y1,", "1)) ] ) iter_num += 1 # If the current epoch is completed", "with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper = ROIHelpers(", "epoch # Instance progress bar for display progress in current epoch progress_bar =", "before the training process.\"\"\" # Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois", ") # test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test", "self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num", "the resume of the epoch if self.config.verbose: message = 'Mean number of bounding", "Define classifier, it will assign the class of the detected objects. classifier =", "\"\"\" if self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples =", "import backend as K from keras.optimizers import Adam, SGD, RMSprop from keras.layers import", "self.roi_input], rpn[:2] + classifier) # Use to load/save weights for the models. self.__load_weights()", "__select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples and Y negatives samples for complete", "random.shuffle(self.all_data) # Set for training process self.num_images = len(self.all_data) self.train_images = [s for", "RPN, built on the base layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it", "if the current loss is better. 
if curr_loss < best_loss: message = 'Total", "None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive samples (IoU >", "self.data_gen_val = None # Input Tensor Regions of Interest self.roi_input = Input(shape=(None, 4))", "generators, shuffle the data and create other data structures. \"\"\" # Randomize data", "losses, for class detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2)", "in self.all_data if s['imageset'] == 'trainval'] self.val_images = [s for s in self.all_data", "e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights can be found in", "img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives", "Config file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path =", "-*- coding: utf-8 -*- from __future__ import division import random import pprint import", "the image data dictionaries. 
if 'bg' not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg']", "frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import Config from frcnn.utilities.parser", "is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive samples (IoU", "# Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network", "self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance", "print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try: # If an epoch is", "== 'trainval'] self.val_images = [s for s in self.all_data if s['imageset'] == 'test']", "message = \"RPN is not producing bounding boxes that overlap the \" message", "that holds both the RPN and the classifier... # Used to load/save weights", "a trainable object. self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and compile models.\"\"\" learning_rate", ":]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] =", "Check RPN settings or keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives", "\"Average number of overlapping bounding boxes from RPN = {}\" message += \"", "for training, split 80% training and 20% for validation self.train_images = None self.val_images", ").tolist() except: \"\"\"The replace parameter determines whether or not the selection is made", "if mean_overlapping_bboxes == 0: message = \"RPN is not producing bounding boxes that", "bounding boxes that overlap the \" message += \"ground truth boxes. 
Check RPN", "loss: {}'.format(best_loss, curr_loss)) # Update the best loss if the current loss is", "None self.results_path = results_path # Datasets for training, split 80% training and 20%", "which device the operation ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure(", "best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0 break except Exception", "1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num,", "\\ https://github.com/fchollet/keras/tree/master/keras/applications\") def __print_average_bbxes(self): \"\"\"Show the average number of overlapping bboxes.\"\"\" total =", "self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch =", "= None self.val_images = None # Convolutional Neural Network self.cnn = None #", "epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) #", "or keep training.\" print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\"", "info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls) info.append(loss_class_regr) info.append(total_time) self.history.append_epoch_info(info) return best_loss if __name__ ==", "overlap_thresh=0.9, max_boxes=300 ) # Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1],", "= Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn = None self.model_classifier =", "self.config.weights_output_path ) # Generate row for epoch info info = [] # add", "truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 
'Classifier accuracy for bounding boxes from RPN:", "allowed verbose, then: # print the average number of overlapping bboxes. len_rpn_acc_rpn_moni =", "weights.\") print(\"Weights can be found in the keras application folder \\ https://github.com/fchollet/keras/tree/master/keras/applications\") def", "# Update progress bar in the current epoch progress_bar.update( iter_num + 1, [", "selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist()", "progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while", "size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples),", "self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val'", "if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) #", "roi, img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get", "Mean Absolute Error ) # test save summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all,", "= random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn,", "annotate file or create annotate file from dataset. 
\"\"\" # Instance parser, recover", "np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:,", "# Input Tensor Regions of Interest self.roi_input = Input(shape=(None, 4)) # Models for", "= loss_class[2] self.losses[iter_num, 4] = loss_class[3] def __update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the", "History class Trainer(object): \"\"\"Setup training and run for some epochs.\"\"\" def __init__(self, results_path,", "print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss =", "mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def", "other data structures. \"\"\" # Randomize data random.shuffle(self.all_data) # Set for training process", "the base layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the", "from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config import Config from", "3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch)", "print(\"Couldn't load pretrained model weights.\") print(\"Weights can be found in the keras application", "ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is None: self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0)", "self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn =", "vs current loss: {}'.format(best_loss, curr_loss)) # Update the best loss if the current", "Y1, Y2) # Update progress bar in the current epoch progress_bar.update( iter_num +", "bg was not added, it will be 
added to the image data dictionaries.", "self.model_classifier = Model([img_input, self.roi_input], classifier) # This is a model that holds both", "decreased from {} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss #", ":], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2]", "boxes from RPN overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy", "Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() # Iterative process iter_num = 0 best_loss = np.Inf", "parameter determines whether or not the selection is made with replacement (default this", "value False). \"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples", "random pos or neg sample. \"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if", "learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\" # Config file self.config.data_augmentation = data_augmentation", "time for current epoch # Instance progress bar for display progress in current", "use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2,", "- len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme", "X2, Y1, Y2): \"\"\"Update losses for RPN and classifier.\"\"\" # Calculate weights according", "add data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr)", "= len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X,", "for image in TensorFlow self.input_shape_image = (None, None, 3) def 
recover_data( self, dataset_path,", "self.model_classifier = None self.model_all = None # Training process self.iter_num = 0 self.losses", "size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In the", "progress bar for display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num", "number of overlapping bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor)", "None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System and", "len(self.all_data) self.train_images = [s for s in self.all_data if s['imageset'] == 'trainval'] self.val_images", "Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import", "= use_gpu self.parser = None self.all_data = [] self.classes_count = [] self.class_mapping =", "== self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0 break", "num_rois, weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\" #", "in range(self.config.num_epochs): start_time = time.time() # init time for current epoch # Instance", "= 'Classifier accuracy for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier:", "print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle the data and", "X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] =", "correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() 
self.__build_frcnn() # Iterative", "= time.time() # init time for current epoch # Instance progress bar for", "= sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = [] message =", "start_time self.rpn_accuracy_for_epoch = [] # Print the resume of the epoch if self.config.verbose:", "sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num, 0] = loss_rpn[1]", "the current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num,", "\"\"\"Update losses for RPN and classifier.\"\"\" # Calculate weights according to classifier batch", "Faster R-CNN.\"\"\" img_input = Input(shape=self.input_shape_image) # Define the base network (VGG16) shared_layers =", "for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message = \"RPN", "+ loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss)) # Update the", "= np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0, :, -1] == 0)", "then: # print the average number of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1", "from RPN = {}\" message += \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length))", "else: # Leave the negative samples list empty neg_samples = [] if len(pos_samples)", "Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional neural network self.cnn", "or neg sample. 
\"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2):", "# test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load", "file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries", "current loss: {}'.format(best_loss, curr_loss)) # Update the best loss if the current loss", "data random.shuffle(self.all_data) # Set for training process self.num_images = len(self.all_data) self.train_images = [s", "model that holds both the RPN and the classifier... # Used to load/save", "\"\"\" selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples", "Faster R-CNN. self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This", "can be ' message += 'loaded when testing to ensure correct results' print(message.format(config_output_filename))", "len(pos_samples) > 0: pos_samples = pos_samples[0] else: pos_samples = [] return (neg_samples, pos_samples)", "the config data for training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename, 'wb') as", "4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time = time.time() -", "Y1, Y2): \"\"\"Update losses for RPN and classifier.\"\"\" # Calculate weights according to", "= data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path self.config.num_epochs =", "= loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] 
self.losses[iter_num, 3] =", "log device placement (on which device the operation ran) config_gpu.log_device_placement = True sess", "('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1))", "print('Exception: {}'.format(e)) continue print('Training complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle", "boxes from RPN = {}\" message += \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes,", "message = 'Config has been written to {}, and can be ' message", "Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou", "results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data(", "negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector and", "the extreme case where num_rois = 1, we pick a random pos or", "[] # add data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss)", "'Total loss decreased from {} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss =", "# Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls =", "objects. classifier = self.cnn.build_classifier( shared_layers, num_classes=len(self.classes_count) ) # Build models for Faster R-CNN.", "tf.compat.v1.ConfigProto() # dynamically grow the memory used on the GPU config_gpu.gpu_options.allow_growth = True", "# Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans #", "annotate file from dataset. 
\"\"\" # Instance parser, recover data from annotate file", "import generic_utils from frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers", "= 0 break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue print('Training complete!!!,", "__update_losses_in_epoch(self, epoch_num, best_loss, start_time): \"\"\"Update the final losses after the epochs ends.\"\"\" #", "self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and negatives", "used on the GPU config_gpu.gpu_options.allow_growth = True # to log device placement (on", "= \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path", "samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length,", "Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for epoch", "from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data,", ") self.losses[iter_num, 0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1]", "= np.random.choice( a=pos_samples, size=self.config.num_rois // 2, replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples,", "np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self,", "0: pos_samples = pos_samples[0] else: pos_samples = [] return (neg_samples, pos_samples) def __select_samples(self,", "Y) # pred 
with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper", "it will use for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including", "R-CNN. self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) # This is", "self.history.save_model_image(self.model_classifier, \"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\" try:", "(None, None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate", "samples {}'.format(len(self.train_images))) print('Num val samples {}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt(", "self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3] = loss_class[2] self.losses[iter_num, 4] = loss_class[3] def", "case where num_rois = 1, we pick a random pos or neg sample.", "negatives samples for complete number RoIs. 
\"\"\" if self.config.num_rois > 1: if len(pos_samples)", "= np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total =", "Define the base network (VGG16) shared_layers = self.cnn.build_nn_base(img_input) # Define the RPN, built", "with open(config_output_filename, 'wb') as config_f: pickle.dump(self.config, config_f) message = 'Config has been written", "= [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\"", "{}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as e: print('Exception: {}'.format(e))", "device placement (on which device the operation ran) config_gpu.log_device_placement = True sess =", "self.val_images = [s for s in self.all_data if s['imageset'] == 'test'] print('Num train", "('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ] ) iter_num += 1 #", "for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y) # pred with RPN pred_rpn = self.model_rpn.predict_on_batch(X)", "setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the memory", "config_gpu.gpu_options.allow_growth = True # to log device placement (on which device the operation", "= None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System", "a pretrained model.\"\"\" try: print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True", "= None # Input Tensor Regions of Interest self.roi_input = Input(shape=(None, 4)) #", "= Config() 
self.config.use_gpu = use_gpu self.parser = None self.all_data = [] self.classes_count =", "[] self.classes_count = [] self.class_mapping = [] self.num_images = 0 self.num_anchors = 0", "def __prepare_train(self): \"\"\"Initialize data generators, shuffle the data and create other data structures.", "[] self.class_mapping = [] self.num_images = 0 self.num_anchors = 0 self.input_shape_image = None", "weights_input_path self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors", "loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes =", "= self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If bg was not added,", "2, replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist()", "class Trainer(object): \"\"\"Setup training and run for some epochs.\"\"\" def __init__(self, results_path, use_gpu=False):", "generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path, num_epochs=1", "np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create the unified", "where num_rois = 1, we pick a random pos or neg sample. \"\"\"", "> thresh) neg_samples = np.where(Y1[0, :, -1] == 1) pos_samples = np.where(Y1[0, :,", "is completed if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time )", "# Show resume from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data that it", "number RoIs. 
\"\"\" if self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois // 2:", "persistence in config object self.config.class_mapping = self.class_mapping # Show resume from loaded data", "print(message) def __validate_samples(self, neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples) >", "time import pickle import logging import traceback from optparse import OptionParser import numpy", "selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples +", "LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' # Mean Absolute Error", "self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0 break except", "= [] message = \"Average number of overlapping bounding boxes from RPN =", "epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls',", ").tolist() sel_samples = selected_pos_samples + selected_neg_samples else: \"\"\"In the extreme case where num_rois", "self.rpn_accuracy_rpn_monitor = [] message = \"Average number of overlapping bounding boxes from RPN", "# Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious =", "for the models. self.__load_weights() # Save the models like a trainable object. self.__compile_models()", "__prepare_train(self): \"\"\"Initialize data generators, shuffle the data and create other data structures. \"\"\"", "overlapping bboxes. 
len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and", "weights_output_path, weights_input_path, num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\" # Config", "random import pprint import sys import time import pickle import logging import traceback", "resume from loaded data self.show_info_data() def show_info_data(self): \"\"\"Show data that it will use", "self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create", "or not the selection is made with replacement (default this parameter takes the", "np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:,", "weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the best model self.history.save_best_model( self.model_all,", "layers. rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the class of", ") # Tensor for image in TensorFlow self.input_shape_image = (None, None, 3) def", "= 0 best_loss = np.Inf # Start iterative process print(\"The training has begun", "Y2) # Update progress bar in the current epoch progress_bar.update( iter_num + 1,", "loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {} vs current loss: {}'.format(best_loss, curr_loss))", "replace=False ).tolist() try: selected_neg_samples = np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=False ).tolist() except:", "def show_info_data(self): \"\"\"Show data that it will use for training.\"\"\" print('Training images per", "RoIs. 
\"\"\" if self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples", "self.input_shape_image = (None, None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data", "= {}\" message += \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes", "neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples", "self.rpn_accuracy_rpn_monitor.append(0) self.rpn_accuracy_for_epoch.append(0) continue # Get negatives samples and positive samples (IoU > thresh)", "# Set for training process self.num_images = len(self.all_data) self.train_images = [s for s", "current epoch # Instance progress bar for display progress in current epoch progress_bar", "best_loss: message = 'Total loss decreased from {} to {}, saving weights' print(message.format(best_loss,", "X2, Y1, Y2, ious = roi_helper.calc_iou( roi, img_data, self.class_mapping ) if X2 is", "neg_samples, pos_samples): \"\"\"Format positives and negatives samples.\"\"\" if len(neg_samples) > 0: # Just", "optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] ) self.model_classifier.compile( optimizer=optimizer_classifier, loss=[", "import Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn", "begun :)\") for epoch_num in range(self.config.num_epochs): start_time = time.time() # init time for", "\"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path +", "Adam, SGD, RMSprop from 
keras.layers import Input from keras.models import Model from keras.utils", "from optparse import OptionParser import numpy as np import tensorflow as tf from", "+ 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr',", "iterative process print(\"The training has begun :)\") for epoch_num in range(self.config.num_epochs): start_time =", "self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) # Get data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate)", "the memory used on the GPU config_gpu.gpu_options.allow_growth = True # to log device", "curr_loss)) # Update the best loss if the current loss is better. if", "path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path,", "\"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\") self.history.save_model_image(self.model_classifier, \"classifier\")", "self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist() else: selected_pos_samples = np.random.choice( a=pos_samples, size=self.config.num_rois //", "self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num + 1)) ]", "X positives samples and Y negatives samples for complete number RoIs. 
\"\"\" if", "self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor = [] self.rpn_accuracy_for_epoch", "curr_loss)) best_loss = curr_loss # Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path )", "self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history = History(results_path) # System and session", ") iter_num = 0 break except Exception as e: #traceback.print_exc() print('Exception: {}'.format(e)) continue", "message += 'loaded when testing to ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train", "Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train' ) self.data_gen_val = Utils.get_anchor_gt( self.val_images, self.classes_count, self.config,", "> 0: # Just choose the first one neg_samples = neg_samples[0] else: #", "20% for validation self.train_images = None self.val_images = None # Convolutional Neural Network", "= self.__validate_samples( neg_samples, pos_samples ) self.rpn_accuracy_rpn_monitor.append(len(pos_samples)) self.rpn_accuracy_for_epoch.append((len(pos_samples))) # Select samples from positives and", "+= \" for {} previous iteration(s).\" print(message.format(mean_overlapping_bboxes, self.config.epoch_length)) if mean_overlapping_bboxes == 0: message", "= len(self.classes_count) losses = LossesCalculator(num_classes, self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile(", "import Input from keras.models import Model from keras.utils import generic_utils from frcnn.data_generator import", "self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row for epoch info info = []", "= 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config object 
self.config.class_mapping =", "iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num = 0", "# If the current epoch is completed if iter_num == self.config.epoch_length: best_loss =", "[] self.num_images = 0 self.num_anchors = 0 self.input_shape_image = None self.results_path = results_path", "\"classifier\") self.history.save_model_image(self.model_all, \"all\") def __load_weights(self): \"\"\"Load weights from a pretrained model.\"\"\" try: print('Loading", "positives samples and Y negatives samples for complete number RoIs. \"\"\" if self.config.num_rois", "use for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg) =", "extreme case where num_rois = 1, we pick a random pos or neg", "print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls))", "not producing bounding boxes that overlap the \" message += \"ground truth boxes.", "row for epoch info info = [] # add data to info list", "if self.config.num_rois > 1: if len(pos_samples) < self.config.num_rois // 2: selected_pos_samples = pos_samples.tolist()", "for current epoch # Instance progress bar for display progress in current epoch", "summaries self.history.save_summary(self.model_rpn, \"rpn\") self.history.save_summary(self.model_classifier, \"classifier\") self.history.save_summary(self.model_all, \"all\") # test save plots self.history.save_model_image(self.model_rpn, \"rpn\")", "RPN and classifier.\"\"\" # Calculate weights according to classifier batch training. loss_class =", "image in TensorFlow self.input_shape_image = (None, None, 3) def recover_data( self, dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\",", "loss if the current loss is better. 
if curr_loss < best_loss: message =", "was not added, it will be added to the image data dictionaries. if", "ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train() self.__build_frcnn() #", "0 self.input_shape_image = None self.results_path = results_path # Datasets for training, split 80%", "except Exception as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights can", "coding: utf-8 -*- from __future__ import division import random import pprint import sys", "# Build models for Faster R-CNN. self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input,", "= \"RPN is not producing bounding boxes that overlap the \" message +=", "data to info list info.append(epoch_num + 1) info.append(mean_overlapping_bboxes) info.append(class_acc) info.append(curr_loss) info.append(loss_rpn_cls) info.append(loss_rpn_regr) info.append(loss_class_cls)", "# Instance convolutional neural network self.cnn = CNN( self.num_anchors, (self.roi_input, self.config.num_rois), len(self.classes_count) )", "= Utils.get_anchor_gt( self.val_images, self.classes_count, self.config, CNN.get_img_output_length, mode='val' ) self.losses = np.zeros((self.config.epoch_length, 5)) self.rpn_accuracy_rpn_monitor", "best loss if the current loss is better. 
if curr_loss < best_loss: message", "from positives and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for", "iter_num += 1 # If the current epoch is completed if iter_num ==", "been written to {}, and can be ' message += 'loaded when testing", "negatives samples and positive samples (IoU > thresh) neg_samples = np.where(Y1[0, :, -1]", "frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from frcnn.cnn import CNN from frcnn.utilities.config", "file self.config.data_augmentation = data_augmentation self.config.num_rois = num_rois self.config.weights_output_path = weights_output_path self.config.weights_input_path = weights_input_path", "'Config has been written to {}, and can be ' message += 'loaded", "detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls", "as config_f: pickle.dump(self.config, config_f) message = 'Config has been written to {}, and", "frcnn.data_generator import Metrics, Utils from frcnn.losses import LossesCalculator from frcnn.roi_helpers import ROIHelpers from", "data from annotate file or dataset self.parser = Parser( dataset_path=dataset_path, annotate_path=annotate_path ) #", "iter_num = 0 best_loss = np.Inf # Start iterative process print(\"The training has", "= neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return", "the data and create other data structures. 
\"\"\" # Randomize data random.shuffle(self.all_data) #", "weights for the models self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier) # Use", "pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples =", "and self.config.verbose: self.__print_average_bbxes() X, Y, img_data = next(self.data_gen_train) # calc loss for RPN", "as e: print('Exception: {}'.format(e)) print(\"Couldn't load pretrained model weights.\") print(\"Weights can be found", "current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs)) while True: try:", "to classifier batch training. loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples,", "display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch {}/{}'.format(epoch_num + 1, self.config.num_epochs))", "super(Trainer, self).__init__() self.config = Config() self.config.use_gpu = use_gpu self.parser = None self.all_data =", "= [] return (neg_samples, pos_samples) def __select_samples(self, neg_samples, pos_samples): \"\"\"Select X positives samples", "+ allowed verbose, then: # print the average number of overlapping bboxes. 
len_rpn_acc_rpn_moni", "# Start iterative process print(\"The training has begun :)\") for epoch_num in range(self.config.num_epochs):", "print('Loading weights from {}'.format(self.config.weights_input_path)) self.model_rpn.load_weights(self.config.weights_input_path, by_name=True) self.model_classifier.load_weights( self.config.weights_input_path, by_name=True ) except Exception as", "epochs ends.\"\"\" # Average losses loss_rpn_cls = np.mean(self.losses[:, 0]) loss_rpn_regr = np.mean(self.losses[:, 1])", "current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]),", "<filename>model/train.py # -*- coding: utf-8 -*- from __future__ import division import random import", "self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples, :]] ) self.losses[iter_num,", "complete!!!, exiting :p') def __prepare_train(self): \"\"\"Initialize data generators, shuffle the data and create", "numpy as np import tensorflow as tf from keras import backend as K", "data dictionaries ans = self.parser.get_data(generate_annotate=generate_annotate) self.all_data, self.classes_count, self.class_mapping = ans # If bg", "curr_loss # Save the best model self.history.save_best_model( self.model_all, self.config.weights_output_path ) # Generate row", "samples list empty neg_samples = [] if len(pos_samples) > 0: pos_samples = pos_samples[0]", "num_epochs=5, epoch_length=32, learning_rate=1e-5): \"\"\"Set hyperparameters before the training process.\"\"\" # Config file self.config.data_augmentation", "and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) # Update progress bar", "\"\"\"Recover data from annotate file or create annotate file from dataset. 
\"\"\" #", "dataset_path, annotate_path=\"frcnn/utilities/annotate.txt\", generate_annotate=False): \"\"\"Recover data from annotate file or create annotate file from", "self.model_classifier.compile( optimizer=optimizer_classifier, loss=[ LossesCalculator.class_loss_cls, LossesCalculator.class_loss_regr() ], metrics={'dense_class_{}'.format(num_classes): 'accuracy'}, ) self.model_all.compile( optimizer='sgd', loss='mae' #", "# Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) #", "RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss RPN regression: {}'.format(loss_rpn_regr)) print('Loss detector", "and negatives samples sel_samples = self.__select_samples(neg_samples, pos_samples) # Update losses, for class detector", "number of overlapping bboxes. len_rpn_acc_rpn_moni = len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if", "{} to {}, saving weights' print(message.format(best_loss, curr_loss)) best_loss = curr_loss # Save the", "the epoch if self.config.verbose: message = 'Mean number of bounding boxes from RPN", "converts from (x1,y1,x2,y2) to (x,y,w,h) format X2, Y1, Y2, ious = roi_helper.calc_iou( roi,", "pickle.dump(self.config, config_f) message = 'Config has been written to {}, and can be", "for Faster R-CNN. self.model_rpn = Model(img_input, rpn[:2]) self.model_classifier = Model([img_input, self.roi_input], classifier) #", "# Update the best loss if the current loss is better. 
if curr_loss", "random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def __update_losses(self, sel_samples, iter_num, loss_rpn, X,", "ran) config_gpu.log_device_placement = True sess = tf.compat.v1.Session(config=config_gpu) def configure( self, data_augmentation, num_rois, weights_output_path,", "self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in config object", "np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch)", "if iter_num == self.config.epoch_length: best_loss = self.__update_losses_in_epoch( epoch_num, best_loss, start_time ) iter_num =", "Instance progress bar for display progress in current epoch progress_bar = generic_utils.Progbar(self.config.epoch_length) print('Epoch", "self.config.use_gpu = use_gpu self.parser = None self.all_data = [] self.classes_count = [] self.class_mapping", "will use for training.\"\"\" print('Training images per class:') pprint.pprint(self.classes_count) print('Num classes (including bg)", "/ len(self.rpn_accuracy_for_epoch) total_time = time.time() - start_time self.rpn_accuracy_for_epoch = [] # Print the", "print('Elapsed time: {}'.format(total_time)) curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best", "a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300 ) # Convert RPN", "# pred with RPN pred_rpn = self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper", "classes (including bg) = {}'.format(len(self.classes_count))) # Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename):", "the models self.model_all = Model([img_input, self.roi_input], rpn[:2] + classifier) # Use to load/save", "self.num_anchors = 0 self.input_shape_image = None self.results_path = results_path # 
Datasets for training,", "rpn = self.cnn.create_rpn(shared_layers) # Define classifier, it will assign the class of the", "training, split 80% training and 20% for validation self.train_images = None self.val_images =", "] ) iter_num += 1 # If the current epoch is completed if", "\"\"\"Initialize data generators, shuffle the data and create other data structures. \"\"\" #", "Instance parser, recover data from annotate file or dataset self.parser = Parser( dataset_path=dataset_path,", "R-CNN self.model_rpn = None self.model_classifier = None self.model_all = None # Training process", "0] = loss_rpn[1] self.losses[iter_num, 1] = loss_rpn[2] self.losses[iter_num, 2] = loss_class[1] self.losses[iter_num, 3]", "iter_num + 1, [ ('rpn_cls', self.losses[iter_num, 0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]),", "accuracy for bounding boxes from RPN: {}' print(message.format(class_acc)) print('Loss RPN classifier: {}'.format(loss_rpn_cls)) print('Loss", "optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ] )", "# Save the models like a trainable object. self.__compile_models() def __compile_models(self): \"\"\" Create", "\"ground truth boxes. Check RPN settings or keep training.\" print(message) def __validate_samples(self, neg_samples,", "self.cnn.build_nn_base(img_input) # Define the RPN, built on the base layers. 
rpn = self.cnn.create_rpn(shared_layers)", "if len(neg_samples) > 0: # Just choose the first one neg_samples = neg_samples[0]", "self.config.num_epochs = num_epochs self.config.epoch_length = epoch_length self.config.learning_rate = learning_rate # Trainer self.num_anchors =", "in self.all_data if s['imageset'] == 'test'] print('Num train samples {}'.format(len(self.train_images))) print('Num val samples", "# Used to load/save weights for the models self.model_all = Model([img_input, self.roi_input], rpn[:2]", "loss_rpn_regr = np.mean(self.losses[:, 1]) loss_class_cls = np.mean(self.losses[:, 2]) loss_class_regr = np.mean(self.losses[:, 3]) class_acc", "class_acc = np.mean(self.losses[:, 4]) total = sum(self.rpn_accuracy_for_epoch) mean_overlapping_bboxes = float(total) / len(self.rpn_accuracy_for_epoch) total_time", "regression: {}'.format(loss_rpn_regr)) print('Loss detector classifier: {}'.format(loss_class_cls)) print('Loss detector regression: {}'.format(loss_class_regr)) print('Elapsed time: {}'.format(total_time))", "0 self.losses = None self.rpn_accuracy_rpn_monitor = None self.rpn_accuracy_for_epoch = None self.history = History(results_path)", "session, setup.\"\"\" sys.setrecursionlimit(40000) logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) if(self.config.use_gpu): config_gpu = tf.compat.v1.ConfigProto() # dynamically grow the", "roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True ) # Calc_iou converts from (x1,y1,x2,y2) to (x,y,w,h) format", "for epoch info info = [] # add data to info list info.append(epoch_num", "np.random.choice( a=neg_samples, size=self.config.num_rois - len(selected_pos_samples), replace=True ).tolist() sel_samples = selected_pos_samples + selected_neg_samples else:", "classifier.\"\"\" # Calculate weights according to classifier batch training. 
loss_class = self.model_classifier.train_on_batch( [X,", "\"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist() if np.random.randint(0, 2): sel_samples = random.choice(neg_samples)", "self.rpn_accuracy_for_epoch = [] def __build_frcnn(self): \"\"\"Create the unified model Faster R-CNN.\"\"\" img_input =", "Persistence the data self.history.save_classes_info(self.classes_count) def save_config(self, config_output_filename): \"\"\"Do persistence the config data for", "len(self.rpn_accuracy_rpn_monitor) cond1 = (len_rpn_acc_rpn_moni == self.config.epoch_length) if cond1 and self.config.verbose: self.__print_average_bbxes() X, Y,", "self.roi_input = Input(shape=(None, 4)) # Models for Faster R-CNN self.model_rpn = None self.model_classifier", "not in self.classes_count: self.classes_count['bg'] = 0 self.class_mapping['bg'] = len(self.class_mapping) # Mapping persistence in", "\"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset = \"/home/octocat/Escritorio/flowchart_3b_v3\" trainer.recover_data( path_dataset, generate_annotate=False, annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False,", "None self.val_images = None # Convolutional Neural Network self.cnn = None # Data", "{}'.format(len(self.val_images))) # Create data generators self.data_gen_train = Utils.get_anchor_gt( self.train_images, self.classes_count, self.config, CNN.get_img_output_length, mode='train'", "\"\"\"Do persistence the config data for training process.\"\"\" self.config.config_output_filename = config_output_filename with open(config_output_filename,", "classifier) # This is a model that holds both the RPN and the", "len(self.class_mapping) # Mapping persistence in config object self.config.class_mapping = self.class_mapping # Show resume", "self.input_shape_image = None self.results_path = results_path # Datasets for training, split 80% training", "size=self.config.num_rois - len(selected_pos_samples), 
replace=False ).tolist() except: \"\"\"The replace parameter determines whether or not", "training. loss_class = self.model_classifier.train_on_batch( [X, X2[:, sel_samples, :]], [Y1[:, sel_samples, :], Y2[:, sel_samples,", "an epoch is completed + allowed verbose, then: # print the average number", "bboxes.\"\"\" total = sum(self.rpn_accuracy_rpn_monitor) mean_overlapping_bboxes = float(total) mean_overlapping_bboxes /= len(self.rpn_accuracy_rpn_monitor) self.rpn_accuracy_rpn_monitor = []", "Model([img_input, self.roi_input], classifier) # This is a model that holds both the RPN", "self.__compile_models() def __compile_models(self): \"\"\" Create optimizers and compile models.\"\"\" learning_rate = self.config.learning_rate num_classes", "if np.random.randint(0, 2): sel_samples = random.choice(neg_samples) else: sel_samples = random.choice(pos_samples) return sel_samples def", "__name__ == '__main__': results_path = \"training_results/1\" trainer = Trainer(results_path) weights_input_path = \"vgg16_weights_tf_dim_ordering_tf_kernels.h5\" path_dataset", "except: \"\"\"The replace parameter determines whether or not the selection is made with", "0]), ('rpn_regr', self.losses[iter_num, 1]), ('det_cls', self.losses[iter_num, 2]), ('det_regr', self.losses[iter_num, 3]), ('epoch', int(epoch_num +", "= self.model_rpn.predict_on_batch(X) # Instance a ROI Helper roi_helper = ROIHelpers( self.config, overlap_thresh=0.9, max_boxes=300", "# -*- coding: utf-8 -*- from __future__ import division import random import pprint", "= tf.compat.v1.ConfigProto() # dynamically grow the memory used on the GPU config_gpu.gpu_options.allow_growth =", "mean_overlapping_bboxes == 0: message = \"RPN is not producing bounding boxes that overlap", "# Leave the negative samples list empty neg_samples = [] if len(pos_samples) >", "testing to ensure correct results' print(message.format(config_output_filename)) def train(self): \"\"\"Train the Faster R-CNN.\"\"\" self.__prepare_train()", "epoch is 
completed + allowed verbose, then: # print the average number of", "curr_loss = loss_rpn_cls + loss_rpn_regr + loss_class_cls + loss_class_regr print('Best loss: {} vs", "Y, img_data = next(self.data_gen_train) # calc loss for RPN loss_rpn = self.model_rpn.train_on_batch(X, Y)", "a random pos or neg sample. \"\"\" selected_pos_samples = pos_samples.tolist() selected_neg_samples = neg_samples.tolist()", ":)\") for epoch_num in range(self.config.num_epochs): start_time = time.time() # init time for current", "traceback from optparse import OptionParser import numpy as np import tensorflow as tf", "= 'Mean number of bounding boxes from RPN overlapping ground truth boxes: {}'", "= learning_rate # Trainer self.num_anchors = len(self.config.anchor_box_scales) self.num_anchors *= len(self.config.anchor_box_ratios) # Instance convolutional", "self.__load_weights() # Save the models like a trainable object. self.__compile_models() def __compile_models(self): \"\"\"", "added to the image data dictionaries. if 'bg' not in self.classes_count: self.classes_count['bg'] =", "one neg_samples = neg_samples[0] else: # Leave the negative samples list empty neg_samples", "annotate_path=results_path + \"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path, num_epochs=1 )", "\"/annotate.txt\" ) trainer.configure( data_augmentation=False, num_rois=32, weights_output_path=results_path + \"/model_frcnn.hdf5\", weights_input_path=weights_input_path, num_epochs=1 ) trainer.save_config(results_path +", ") # Convert RPN to ROI roi = roi_helper.convert_rpn_to_roi( pred_rpn[0], pred_rpn[1], use_regr=True )", "overlapping ground truth boxes: {}' print(message.format(mean_overlapping_bboxes)) message = 'Classifier accuracy for bounding boxes", "current loss is better. 
if curr_loss < best_loss: message = 'Total loss decreased", "detector and RPN self.__update_losses(sel_samples, iter_num, loss_rpn, X, X2, Y1, Y2) # Update progress", "pretrained model weights.\") print(\"Weights can be found in the keras application folder \\", "self.num_anchors) optimizer = Adam(lr=learning_rate) optimizer_classifier = Adam(lr=learning_rate) self.model_rpn.compile( optimizer=optimizer, loss=[ LossesCalculator.rpn_loss_cls(), LossesCalculator.rpn_loss_regr() ]", "progress bar in the current epoch progress_bar.update( iter_num + 1, [ ('rpn_cls', self.losses[iter_num," ]
[ "parse(self): configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME']", "__name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser", "config_json_parser.py # ######################################################################## if __name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE)", "Testing purposes: # # run this script in a directory containing a valid", "# python3 -i config_json_parser.py # ######################################################################## if __name__ == '__main__': CONFIG_FILE = 'config.json'", "config.json file # # along with the pysud basic modules # # python3", "\"\"\" Summary Attributes: config_json_file: A valid json file path containing pysud game configurations.", "purposes: # # run this script in a directory containing a valid config.json", "with the pysud basic modules # # python3 -i config_json_parser.py # ######################################################################## if", "return config_dict ######################################################################## # Testing purposes: # # run this script in a", "= ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser = None print('Configurations found: ' + str(config))", "config_json_file: A valid json file path containing pysud game configurations. \"\"\" def __init__(self,", "configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes: # # run this script in", "file path containing pysud game configurations. 
\"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path,", "run this script in a directory containing a valid config.json file # #", "config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## #", "config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes: # # run this", "######################################################################## # Testing purposes: # # run this script in a directory containing", "= self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] =", "config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes: #", "a directory containing a valid config.json file # # along with the pysud", "= configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes: # # run this script", "config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return", "configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] 
config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes:", "'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser = None print('Configurations found: '", "ConfigJsonParser(): \"\"\" Summary Attributes: config_json_file: A valid json file path containing pysud game", "\"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json =", "Summary Attributes: config_json_file: A valid json file path containing pysud game configurations. \"\"\"", "# along with the pysud basic modules # # python3 -i config_json_parser.py #", "valid json file path containing pysud game configurations. \"\"\" def __init__(self, config_json_file_path): self.config_json_file", "basic modules # # python3 -i config_json_parser.py # ######################################################################## if __name__ == '__main__':", "the pysud basic modules # # python3 -i config_json_parser.py # ######################################################################## if __name__", "######################################################################## if __name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config =", "# run this script in a directory containing a valid config.json file #", "if __name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse()", "modules # # python3 -i config_json_parser.py # ######################################################################## if __name__ == '__main__': CONFIG_FILE", "a valid config.json file # # along with the pysud basic modules #", "configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = 
configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes: # # run", "script in a directory containing a valid config.json file # # along with", "# # along with the pysud basic modules # # python3 -i config_json_parser.py", "import json import pysud class ConfigJsonParser(): \"\"\" Summary Attributes: config_json_file: A valid json", "json file path containing pysud game configurations. \"\"\" def __init__(self, config_json_file_path): self.config_json_file =", "game configurations. \"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self):", "encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME']", "pysud basic modules # # python3 -i config_json_parser.py # ######################################################################## if __name__ ==", "def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config']", "dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ########################################################################", "this script in a directory containing a valid config.json file # # along", "self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME']", "file # # 
along with the pysud basic modules # # python3 -i", "== '__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser =", "-i config_json_parser.py # ######################################################################## if __name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser =", "containing pysud game configurations. \"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8'))", "class ConfigJsonParser(): \"\"\" Summary Attributes: config_json_file: A valid json file path containing pysud", "pysud class ConfigJsonParser(): \"\"\" Summary Attributes: config_json_file: A valid json file path containing", "def parse(self): configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] =", "# ######################################################################## if __name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config", "pysud game configurations. 
\"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def", "self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict = dict()", "'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL']", "configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME']", "along with the pysud basic modules # # python3 -i config_json_parser.py # ########################################################################", "= 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser = None print('Configurations found:", "= dict() config_dict['ENABLE_JOURNAL'] = configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict", "directory containing a valid config.json file # # along with the pysud basic", "'__main__': CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser = None", "containing a valid config.json file # # along with the pysud basic modules", "# # run this script in a directory containing a valid config.json file", "A valid json file path containing pysud game configurations. 
\"\"\" def __init__(self, config_json_file_path):", "CONFIG_FILE = 'config.json' config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser = None print('Configurations", "json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL'] =", "config_dict ######################################################################## # Testing purposes: # # run this script in a directory", "valid config.json file # # along with the pysud basic modules # #", "configurations. \"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json", "json import pysud class ConfigJsonParser(): \"\"\" Summary Attributes: config_json_file: A valid json file", "= configurations_json['ENABLE_JOURNAL'] config_dict['ENABLE_SAVEGAME'] = configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing", "config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict =", "__init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict", "Attributes: config_json_file: A valid json file path containing pysud game configurations. 
\"\"\" def", "# Testing purposes: # # run this script in a directory containing a", "= json.load(open(config_json_file_path, 'r', encoding='utf-8')) def parse(self): configurations_json = self.config_json_file['config'] config_dict = dict() config_dict['ENABLE_JOURNAL']", "= configurations_json['ENABLE_SAVEGAME'] config_dict['PLAYER_DEFAULT_NAME'] = configurations_json['PLAYER_DEFAULT_NAME'] return config_dict ######################################################################## # Testing purposes: # #", "path containing pysud game configurations. \"\"\" def __init__(self, config_json_file_path): self.config_json_file = json.load(open(config_json_file_path, 'r',", "config_json_parser = ConfigJsonParser(CONFIG_FILE) config = config_json_parser.parse() config_json_parser = None print('Configurations found: ' +", "in a directory containing a valid config.json file # # along with the", "import pysud class ConfigJsonParser(): \"\"\" Summary Attributes: config_json_file: A valid json file path", "# # python3 -i config_json_parser.py # ######################################################################## if __name__ == '__main__': CONFIG_FILE =", "python3 -i config_json_parser.py # ######################################################################## if __name__ == '__main__': CONFIG_FILE = 'config.json' config_json_parser" ]
[ "def get_level(connection): try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return res_json['summonerLevel'] except requests.exceptions.RequestException:", "import requests def get_level(connection): try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return res_json['summonerLevel']", "get_level(connection): try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return res_json['summonerLevel'] except requests.exceptions.RequestException: return", "try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return res_json['summonerLevel'] except requests.exceptions.RequestException: return -1", "<filename>client/summoner.py import requests def get_level(connection): try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return", "requests def get_level(connection): try: res = connection.get('/lol-summoner/v1/current-summoner') res_json = res.json() return res_json['summonerLevel'] except" ]
[ "class Editing: def __init__(self, company, mobile, address2, notes): self.company = company self.mobile =", "company, mobile, address2, notes): self.company = company self.mobile = mobile self.address2 = address2", "notes): self.company = company self.mobile = mobile self.address2 = address2 self.notes = notes", "__init__(self, company, mobile, address2, notes): self.company = company self.mobile = mobile self.address2 =", "def __init__(self, company, mobile, address2, notes): self.company = company self.mobile = mobile self.address2", "mobile, address2, notes): self.company = company self.mobile = mobile self.address2 = address2 self.notes", "address2, notes): self.company = company self.mobile = mobile self.address2 = address2 self.notes =", "Editing: def __init__(self, company, mobile, address2, notes): self.company = company self.mobile = mobile" ]
[ "VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None,", "from src.executor.Printer import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True,", "True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None, filepath=None)) if __name__ == '__main__': unittest.main()", "import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def", "from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer", "src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer =", "src.executor.Printer import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True)", "TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None, filepath=None)) if", "import unittest from src.executor.Printer import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def", "import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer()", "Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self):", "class TestIExecutor(unittest.TestCase): def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer() 
printer.run(VideoItem(metadata=None, filepath=None))", "def test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None, filepath=None)) if __name__", "test_compiles(self): self.assertEqual(True, True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None, filepath=None)) if __name__ ==", "unittest from src.executor.Printer import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase): def test_compiles(self):", "self.assertEqual(True, True) def test_printer(self): printer = Printer() printer.run(VideoItem(metadata=None, filepath=None)) if __name__ == '__main__':", "python3 import unittest from src.executor.Printer import Printer from src.data.VideoItem import VideoItem class TestIExecutor(unittest.TestCase):", "#!/usr/bin/env python3 import unittest from src.executor.Printer import Printer from src.data.VideoItem import VideoItem class" ]
[ "Below dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv", "#%% if __name__ == \"__main__\": ## Random Tensor for Input ## shape :", "_dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size,", "self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation,", "self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im", "None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d", "conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel,", "__name__ == \"__main__\": ## Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ##", "## Below dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make", "ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half}", "= self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def", "kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) # 2. 
compute y = complexConv(x)", "as np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64:", "ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) #", "bias=bias, device=device, dtype=dtype) def forward(self, x): # shape of x : [batch,channel,axis, ...]", "stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride,", "device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel,", "torch.nn as nn import numpy as np from abc import ABCMeta, abstractmethod class", "stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): # shape of", "@abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv):", "class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self,", "out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel,", "= conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im =", "torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1,", "_convolution_factory(self): return nn.Conv3d #%% if __name__ == \"__main__\": ## Random Tensor for Input", 
"totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv Object ## (in_channel,", ": [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) #", "padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype,", "== \"__main__\": ## Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below", "= {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1,", "torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def", "def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def", "nn.Conv3d #%% if __name__ == \"__main__\": ## Random Tensor for Input ## shape", "- self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return", "class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__ == \"__main__\": ## Random", "padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): # shape of x", "if __name__ == \"__main__\": ## Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size]", "__init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory", "kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() 
conv_factory = self._convolution_factory() dtype", "## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x = torch.randn((10,3,100,100),", "bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re =", "out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): #", "self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None", "dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups,", "x): # shape of x : [batch,channel,axis, ...] real = self.conv_re(x.real) - self.conv_im(x.imag)", "shape of x : [batch,channel,axis, ...] real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary =", "1. Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is required complexConv =", "def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def", "# -*- coding: utf-8 -*- import torch import torch.nn as nn import numpy", "numpy as np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping =", "from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128:", "stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype =", "dtype=torch.cfloat) # 1. 
Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is required", "ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if", "ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__ == \"__main__\": ## Random Tensor", "= self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)),", "import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32:", "dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation,", "ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv):", "Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5))", "forward(self, x): # shape of x : [batch,channel,axis, ...] real = self.conv_re(x.real) -", "torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1,", "dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype)", "coding: utf-8 -*- import torch import torch.nn as nn import numpy as np", "# 1. 
Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is required complexConv", "abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def", "out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) # 2. compute y =", "random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv Object ## (in_channel, out_channel,", "torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None,", "output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class", "= torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter", "in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory =", "groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re", "import numpy as np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping", "# shape of x : [batch,channel,axis, ...] real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary", "= conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self,", "abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double,", "are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. 
Make ComplexConv Object ##", "conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride,", "def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__()", "\"__main__\": ## Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions", "dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return", "-*- import torch import torch.nn as nn import numpy as np from abc", "super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size,", "import torch import torch.nn as nn import numpy as np from abc import", "[batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1.", "conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x):", "padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding,", "groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups,", "(in_channel, out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) # 2. 
compute y", "nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__ == \"__main__\": ##", "return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return", "return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return", "self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def", "def forward(self, x): # shape of x : [batch,channel,axis, ...] real = self.conv_re(x.real)", "imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod", "Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x =", "for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x", "return nn.Conv3d #%% if __name__ == \"__main__\": ## Random Tensor for Input ##", "x : [batch,channel,axis, ...] real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) +", "self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device,", "kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size,", "-*- coding: utf-8 -*- import torch import torch.nn as nn import numpy as", ": [batch,channel,axis, ...] 
real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real)", "x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv Object ## (in_channel, out_channel, kernel_size)", "utf-8 -*- import torch import torch.nn as nn import numpy as np from", "Object ## (in_channel, out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) # 2.", "Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally", "dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype)", "nn import numpy as np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta):", "dtype=None): super().__init__() conv_factory = self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel,", "as nn import numpy as np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module,", "nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d", "torch import torch.nn as nn import numpy as np from abc import ABCMeta,", "dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. 
Make ComplexConv Object", "return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d", "_convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self):", "Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random", "## Random Tensor for Input ## shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are", "self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output)", "BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel,", "out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None): super().__init__() conv_factory = self._convolution_factory()", "dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): # shape of x :", "self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self):", "torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class", "real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1),", "torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, 
bias=True,", "torch.randn((10,3,100,100), dtype=torch.cfloat) # 1. Make ComplexConv Object ## (in_channel, out_channel, kernel_size) parameter is", "dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype)", "np from abc import ABCMeta, abstractmethod class BaseComplexConv(nn.Module, metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float,", "device=device, dtype=dtype) def forward(self, x): # shape of x : [batch,channel,axis, ...] real", "...] real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output =", "<filename>complexcnn/modules.py<gh_stars>0 # -*- coding: utf-8 -*- import torch import torch.nn as nn import", "class ComplexConv1d(BaseComplexConv): def _convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class", "## (in_channel, out_channel, kernel_size) parameter is required complexConv = ComplexConv2d(3,10,(5,5)) # 2. compute", "_convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__ ==", "self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1)", "dtype=dtype) def forward(self, x): # shape of x : [batch,channel,axis, ...] real =", "_convolution_factory(self): return nn.Conv1d class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self):", "of x : [batch,channel,axis, ...] 
real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag)", "kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): # shape", "= torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv):", "def _convolution_factory(self): return nn.Conv3d #%% if __name__ == \"__main__\": ## Random Tensor for", "imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return None class ComplexConv1d(BaseComplexConv): def _convolution_factory(self):", "metaclass=ABCMeta): _dtype_mapping = {torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel,", "bias=bias, device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias,", "return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__ == \"__main__\":", "torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True, device=None, dtype=None):", "class ComplexConv2d(BaseComplexConv): def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%%", "= self._convolution_factory() dtype = self._dtype_mapping.get(dtype, dtype) self.conv_re = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding,", "{torch.complex64: torch.float, torch.complex128: torch.double, torch.complex32: torch.half} def __init__(self, in_channel, out_channel, kernel_size, stride=1, padding=0,", "= self._dtype_mapping.get(dtype, dtype) self.conv_re = 
conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias,", "[batch,channel,axis, ...] real = self.conv_re(x.real) - self.conv_im(x.imag) imaginary = self.conv_re(x.imag) + self.conv_im(x.real) output", "groups=groups, bias=bias, device=device, dtype=dtype) def forward(self, x): # shape of x : [batch,channel,axis,", "shape : [batchsize,channel,axis1_size,axis2_size] ## Below dimensions are totally random x = torch.randn((10,3,100,100), dtype=torch.cfloat)", "device=device, dtype=dtype) self.conv_im = conv_factory(in_channel, out_channel, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias, device=device,", "def _convolution_factory(self): return nn.Conv2d class ComplexConv3d(BaseComplexConv): def _convolution_factory(self): return nn.Conv3d #%% if __name__", "+ self.conv_im(x.real) output = torch.cat((real.unsqueeze_(-1), imaginary.unsqueeze_(-1)), dim=-1) return torch.view_as_complex(output) @abstractmethod def _convolution_factory(self): return", "import torch.nn as nn import numpy as np from abc import ABCMeta, abstractmethod" ]
[ "template to use in processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files',", "with open( temp_file, 'w') as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\")", "program and exports results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile =", "ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0,", "excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type: \" +", "files processed total_invalid -- total number of invalid rows total_empty -- total number", "fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error message for files", "multiple files list templates in the same order as the files they correspond", "+ filename) window.setstatus(\"ERROR: Unable to read file: \" + filename) if exporter is", "data = Data(filename, temp) else: data = Data(filename) if not data.raw_data: print(\"ERROR: Unable", "display -- output window Frame object template -- template to use in process", "outputs (i.e. HTML report or writing to exported file). 
Keyword Arguments: args --", "excel: os.remove(file) if __name__ == '__main__': \"\"\"If the program is run with application.py", "Data(filename, temp) else: data = Data(filename) if not data.raw_data: print(\"ERROR: Unable to read", "label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy()", "= len(templates) print(num_templates) num_files = len(filenames) if num_templates == num_files: for i in", "print(\"Error, different number of files and templates\") else: for name in filenames: main(name,", "sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\")", "4/7] Finding Errors\") if window is not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running", "'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens", "temp_file, 'w') as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files", "**kwargs): \"\"\" Create Data and Report objects, providing necessary information for them to", "!= None: export.write_summary() if excel: for file in excel: os.remove(file) if __name__ ==", "throughout files \"\"\" def __init__(self, filename, offline=True): self.filename = filename self.total_files = 0", "new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', newline='') as fp: wr =", "xlrd from tkinter import * from tkinter import filedialog, ttk from threading import", "and templates\") else: for name in filenames: main(name, exporter=export, window=window) if export !=", "parser.add_argument('filenames', nargs='+',\\ help='one or more filenames for the processor to analyse') parser.add_argument('-t', nargs='+',", "+ new_name + \" is not open in another program\") return None 
filenames.append(new_name)", "wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file already exists and is open", "instead of generating HTML Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\" import argparse", "in output window reset -- Resets the program removing all files from the", "self.templateLabel = Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process", "is not None: window.setstatus(\"WARNING: Unsupported file type \" + file) if exportfile !=", "window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates != None or templates: if len(templates)", "they correspond to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more filenames for the processor", "is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected:", "Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col) +", "program and generates report for all files processed process_export -- Runs program and", "progress bar back to the start templateaskopenfile -- Asks for a template to", "not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if window is not None:", "list and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all", "file containing analysis of all files processed removefile -- Removes file from being", "= [] self.template = None # Main Window mainwindow = Frame(root) self.display =", "PermissionError: # Occurs if export file is open self.setstatus(\"ERROR: Permission Denied, ensure export", "data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all files processed\"\"\" temp_file", "for file in self.datafiles] 
Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process", "os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid", "template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates report", "text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow,", "padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1,", "fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as fd: for", "filetext(self, files): \"\"\"Provides text for output box given a list of files\"\"\" remove_file", "else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if window", "program at runtime. 
exporter -- Exporter object if applicable \"\"\" exporter = kwargs.pop('exporter',", "more filenames for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for", "self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] =", "+ '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells: \" + str(len(data.errors)) +", "self.filename) def write_error(self, data): \"\"\"Writes error message for files not processed fully\"\"\" with", "self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder to", "= wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] +", "text for output box given a list of files\"\"\" remove_file = lambda x,", "window Frame object template -- template to use in process if applicable \"\"\"", "Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles = [file.name for file", "\"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \"", "\"\"\"Removes file from process list and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy()", "else: print(\"ERROR: Unsupported file type: \" + file) if window is not None:", "if template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display,", "Updated: 28/02/2017 \"\"\" import argparse import webbrowser import textwrap import xlrd from tkinter", "except: from data import * from report import * from template_reader import *", "analysis and create desired outputs (i.e. 
HTML report or writing to exported file).", "max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates", "= [] if len(sys.argv) > 1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser", "to run analysis and create desired outputs (i.e. HTML report or writing to", "contained files in the output window filetext -- Fills output box given a", "self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s) to process: \").grid(row=0, sticky=E, pady=10)", "padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar", "in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder", "in simpler and more explanatory way Methods: dataaskopenfile -- Asks for files to", "= Exporter(exportfile) else: export = None if window is not None: window.setmaxprogress(len(filenames) *", "root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files to process and displays them in", "back to the start templateaskopenfile -- Asks for a template to use during", "!= '': export = Exporter(exportfile) else: export = None if window is not", "and templates and runs the program over them. Converts excel files and applies", "and sets progress bar back to the start templateaskopenfile -- Asks for a", "file containing analysis of all files run in program Methods: write_stats -- writes", "here. This will process all the command line arguments before proceeding. 
\"\"\" files", "== 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w',", "Links to Create template web page of Data-oracle website process_report -- Runs program", "self.total_errors = 0 self.total_col = 0 if not offline: with open(self.filename, 'w') as", "+= len(data.invalid_rows) empty_columns = [column.header for column in data.columns if column.empty] fp.write(\"Number of", "else: for name in filenames: main(name, exporter=export, window=window) if export != None: export.write_summary()", "Asks for a template to use during processing and displays it in the", "Variables: terminal -- boolean value whether program is running through terminal or through", "= None if window is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if", "None: window.step_progress() window.setstatus(\"Processing \" + filename + \"...\") if len(args) > 1: temp", "in files: label = Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name == 'posix':", "xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name =", "is not None: window.setstatus(\"Completed Analysis for \" + filename) def get_file_dir(location): \"\"\"Returns the", "Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10)", "and displays them in the output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected:", "in another program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else:", "exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if window is not None: window.step_progress() print(\"Completed", "displays the contained files in the output window filetext -- Fills output box", "Button(mainwindow, text=\"Exit\", 
command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3,", "create desired outputs (i.e. HTML report or writing to exported file). Keyword Arguments:", "if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n')", "Permission Denied, ensure export file is not open in another program\") def removefile(self,", "File(s) or Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1,", "Unable to read file: \" + filename) window.setstatus(\"ERROR: Unable to read file: \"", "functionality when excel files have multiple sheets print(\"Error, different number of files and", "Report objects, providing necessary information for them to run analysis and create desired", "processing exportfile -- file to export analysis to if applicable \"\"\" filenames =", "if name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to csv", "exportfile -- file to export analysis to if applicable \"\"\" filenames = []", "self.total_files += 1 fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid", "Label(mainwindow, text=\"Select File(s) or Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template", "command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow,", "= len(data.errors) fp.write(\"Number of Valid Columns: \" + str(len(data.columns)) + '\\n') self.total_col =", "filename + \"...\") data.analysis() if exporter is None: print(\"[Step 6/7] Generating report\") report", "containing analysis of all files processed removefile -- Removes file from being processed", "fp: wr = 
csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for", "def main(*args, **kwargs): \"\"\" Create Data and Report objects, providing necessary information for", "fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes", "text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'),", "of Valid Columns: \" + str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type", "# If created csv file already exists and is open window.setstatus(\"ERROR: Permission Denied,", "data object write_summary -- writes summary of all files to be run after", "to Create template web page of Data-oracle website process_report -- Runs program and", "for multiple files list templates in the same order as the files they", "self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells: \" + str(len(data.errors)) + '\\n') self.total_errors", "def write_error(self, data): \"\"\"Writes error message for files not processed fully\"\"\" with open(self.filename,", "not processed fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" +", "self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class", "rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh =", "97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles = [file.name for", "os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error message for files not processed fully\"\"\"", "not None: 
window.step_progress() print(\"Completed analysis for: \", filename) if window is not None:", "name to save export file as total_files -- total number of files processed", "given a list of files maketemplate -- Links to Create template web page", "except PermissionError: # If created csv file already exists and is open window.setstatus(\"ERROR:", "run analysis and create desired outputs (i.e. HTML report or writing to exported", "to Process Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self):", "import Thread try: from .data import * from .report import * from .template_reader", "Removes file from being processed after being selected in output window reset --", "os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows))", "of datafiles to be processed display -- output window Frame object template --", "files to be processed templates -- files to use as templates in processing", "',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self):", "= wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\") try: with", "data import * from report import * from template_reader import * terminal =", "folder to process and displays the contained files in the output window filetext", "of files maketemplate -- Links to Create template web page of Data-oracle website", "Analysis for \" + filename) def get_file_dir(location): \"\"\"Returns the directory of the file", "None: window.step_progress() window.setstatus(\"Running Analysis on \" + filename + \"...\") data.analysis() if exporter", "applies template to each file Keyword arguments: files -- files to be processed", "is running through terminal or through GUI progress -- Progress bar showing progress", "Window self.display.grid(row=0, column=3, rowspan=7, 
sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for", "from being processed after being selected in output window reset -- Resets the", "kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports results to file\"\"\" self.progress[\"value\"] = 0", "open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR:", "columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) #", "Reading data\") if window is not None: window.step_progress() window.setstatus(\"Processing \" + filename +", "\" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook',", "\"\"\"Asks for folder to process and displays the contained files in the output", "5.0 + 0.01) if templates != None or templates: if len(templates) == 1:", "self.total_files = 0 self.total_invalid = 0 self.total_empty = 0 self.total_errors = 0 self.total_col", "import filedialog, ttk from threading import Thread try: from .data import * from", "Generating report\") report = Report(data) str_report = report.html_report() html = report.gen_html(str_report) # returns", "them in the output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \" +", "TODO keep functionality when excel files have multiple sheets print(\"Error, different number of", "Data(filename) if not data.raw_data: print(\"ERROR: Unable to read file: \" + filename) window.setstatus(\"ERROR:", "# Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar self.statusText = StringVar()", "as fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data):", "in another program\") def removefile(self, file, label): 
\"\"\"Removes file from process list and", "on \" + filename + \"...\") data.analysis() if exporter is None: print(\"[Step 6/7]", "dataaskopenfolder -- Asks for folder to process and displays the contained files in", "self.setstatus(\"Ready to Process Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max def", "args -- Arguments provided to the program at runtime. exporter -- Exporter object", "for program. Contains GUI interface and exporting class that creates files instead of", "not open in another program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv':", "bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files to", "filename) def get_file_dir(location): \"\"\"Returns the directory of the file with the file name", "label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command=", "text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides", "+ \"_\" + sheet + \".csv\") try: with open(new_name, 'w', newline='') as fp:", "exporter -- Exporter object if applicable \"\"\" exporter = kwargs.pop('exporter', None) window =", "Progress bar showing progress through program \"\"\" class DisplayWindow: \"\"\"GUI for application allowing", "if window is not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if window", "files in the output window filetext -- Fills output box given a list", "import argparse import webbrowser import textwrap import xlrd from tkinter import * from", "def write_summary(self): \"\"\"Writes summary of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open(", "if applicable \"\"\" 
filenames = [] excel = [] for file in files:", "specify template to describe data further. Templates can be used to describe one", "from data import * from report import * from template_reader import * terminal", "line arguments before proceeding. \"\"\" files = [] templates = [] if len(sys.argv)", "label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser", "0 if not offline: with open(self.filename, 'w') as fp: pass def write_stats(self, data):", "Unable to read file: \" + filename) if exporter is not None: exporter.write_error(data)", "progress -- Progress bar showing progress through program \"\"\" class DisplayWindow: \"\"\"GUI for", "\"\"\"Writes statistics of a single data object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2)", "pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. ---------------------------------- Can process", "mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar self.statusText =", "displays the contained files in the output window\"\"\" self.reset() if self.template is not", "to process and displays the contained files in the output window filetext --", "exported file). 
Keyword Arguments: args -- Arguments provided to the program at runtime.", "sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack()", "('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles = [file.name", "process and displays them in the output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template", "export analysis to if applicable \"\"\" filenames = [] excel = [] for", "files in the output window\"\"\" self.reset() if self.template is not None: Label(self.display, text=str(\"Template", "total_files -- total number of files processed total_invalid -- total number of invalid", "'\\n') self.total_files += 1 fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows)) + '\\n')", "[] templates = [] if len(sys.argv) > 1: terminal = True pathname =", "defaultextension=\"*.csv\") if template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel =", "> 1: temp = Template(args[1]) data = Data(filename, temp) else: data = Data(filename)", "fp.write(\"Total Files Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \" +", "to create template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and", "maketemplate(self, event): \"\"\"Opens webbrowser to create template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def", "window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles =", "print(\"[Step 6/7] Generating report\") report = Report(data) str_report = report.html_report() html = 
report.gen_html(str_report)", "range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep functionality when excel", "+ filename) def get_file_dir(location): \"\"\"Returns the directory of the file with the file", "setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a file containing analysis of", "column in data.columns if column.empty] fp.write(\"Number of Empty Columns: \" + str(len(empty_columns)) +", "Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def", "textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files", "text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\")", "if self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All", "str_report = report.html_report() html = report.gen_html(str_report) # returns string of html, also generates", "('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError:", "if window is not None: window.setstatus(\"WARNING: Unsupported file type \" + file) if", "= os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1]", "exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files',", "exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7]", "rownum in range(sh.nrows): 
wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file already exists", "\" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty) + \"\\n\")", "window is not None: window.step_progress() window.setstatus(\"Processing \" + filename + \"...\") if len(args)", "= [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if", "None or templates: if len(templates) == 1: for name in filenames: main(name, templates[0],", "for: \",filename) if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating", "\"\"\" exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename = args[0] print(\"[Step", "\").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2)", "padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow,", "files\"\"\" remove_file = lambda x, m: (lambda p: self.removefile(x, m)) for file in", "each file Keyword arguments: files -- files to be processed templates -- files", "window is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window is", "str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type +", "window\"\"\" self.reset() if self.template is not None: Label(self.display, text=str(\"Template Selected: \" + self.template.name),", "'*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles =", "is run with application.py as the argument to the command line 
execution begins", "string of html, also generates html report for debugging purposes print(\"[Step 7/7] Report", "program. Contains GUI interface and exporting class that creates files instead of generating", "data.columns if column.empty] fp.write(\"Number of Empty Columns: \" + str(len(empty_columns)) + '\\n') self.total_empty", "+ '\\n') fp.write(\"ERROR: Unable to read file, no readable data detected.\\n\\n\") def main(*args,", "Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles def", "-- output window Frame object template -- template to use in process if", "to the program at runtime. exporter -- Exporter object if applicable \"\"\" exporter", "str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors:", "object if applicable \"\"\" exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename", "read file, no readable data detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create Data and", "process and displays them in the output window dataaskopenfolder -- Asks for folder", "sets progress bar back to the start templateaskopenfile -- Asks for a template", "0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports", "to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files') args =", "\" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides text for", "Frame object template -- template to use in process if applicable \"\"\" def", "anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), 
('Excel 97-2003", "as total_files -- total number of files processed total_invalid -- total number of", "output box given a list of files\"\"\" remove_file = lambda x, m: (lambda", "[] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X)", "Fills output box given a list of files maketemplate -- Links to Create", "returns string of html, also generates html report for debugging purposes print(\"[Step 7/7]", "Columns: \" + str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',':", "a single data object write_summary -- writes summary of all files to be", "None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if window", "[] excel = [] for file in files: name_ext = os.path.splitext(file) # TODO", "window = kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step", "self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides text for output box given a", "Exporter object if applicable \"\"\" exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None)", "self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program", "== '.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to csv file\") wb", "str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r')", "to the start templateaskopenfile -- Asks for a template to use during processing", "at runtime. 
exporter -- Exporter object if applicable \"\"\" exporter = kwargs.pop('exporter', None)", "'w', newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name)", "window=window) else: num_templates = len(templates) print(num_templates) num_files = len(filenames) if num_templates == num_files:", "file: \" + filename) window.setstatus(\"ERROR: Unable to read file: \" + filename) if", "process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3 =", "csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file", "Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0,", "'*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except", "exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs if", "to describe one or more csv files. 
If using multiple templates for multiple", "try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files,", "page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates report at", "Finding Errors\") if window is not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\")", "web page of Data-oracle website process_report -- Runs program and generates report for", "reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display =", "self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r',", "m: (lambda p: self.removefile(x, m)) for file in files: label = Label(self.display, text=str(\"\\t\"", "not None: window.setstatus(\"WARNING: Unsupported file type \" + file) if exportfile != '':", "self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def setstatus(self, msg):", "column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5)", "+ filename + \"...\") data.analysis() if exporter is None: print(\"[Step 6/7] Generating report\")", "Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder to process and displays the", "\"\"\"Writes error message for files not processed fully\"\"\" with open(self.filename, 'r+') as fp:", "a list of files\"\"\" remove_file = lambda x, m: (lambda p: self.removefile(x, m))", "!= '': self.datafiles = [] for 
file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder:", "file from process list and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def", "another program\") def removefile(self, file, label): \"\"\"Removes file from process list and removes", "simpler and more explanatory way Methods: dataaskopenfile -- Asks for files to process", "\"\"\" class DisplayWindow: \"\"\"GUI for application allowing users to interact with program in", "excel: for file in excel: os.remove(file) if __name__ == '__main__': \"\"\"If the program", ".report import * from .template_reader import * except: from data import * from", "to process and displays the contained files in the output window\"\"\" self.reset() if", "Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View", "main(name, exporter=export, window=window) if export != None: export.write_summary() if excel: for file in", "to each file Keyword arguments: files -- files to be processed templates --", "self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder to process and displays the contained files", "number of empty columns total_errors -- total numher of errors throughout files \"\"\"", "files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp: fp.write(\"Error Report", "templates -- files to use as templates in processing exportfile -- file to", "Keyword Arguments: args -- Arguments provided to the program at runtime. exporter --", "with the file name Keyword arguments: location -- A file path. \"\"\" return", "numher of errors throughout files \"\"\" def __init__(self, filename, offline=True): self.filename = filename", "body for program. 
Contains GUI interface and exporting class that creates files instead", "a single data object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \"", "bar showing progress through program \"\"\" class DisplayWindow: \"\"\"GUI for application allowing users", "Global Variables: terminal -- boolean value whether program is running through terminal or", "tkinter import filedialog, ttk from threading import Thread try: from .data import *", "= kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7] Processing", "program is running through terminal or through GUI progress -- Progress bar showing", "\"\"\"Opens webbrowser to create template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs", "+ data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all files processed\"\"\"", "\"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \"", "text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2,", "name in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates) num_files", "export file as total_files -- total number of files processed total_invalid -- total", "\" + str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column", "0 self.total_invalid = 0 self.total_empty = 0 self.total_errors = 0 self.total_col = 0", "0 def templateaskopenfile(self): \"\"\"Asks for template to use in processing\"\"\" self.template = []", "analysis of all files run in program Methods: write_stats -- writes 
summary of", "nargs='+', metavar='template', help='a template for the given files') args = parser.parse_args() process_files(args.filenames, args.t)", "self.reset() if self.template is not None: Label(self.display, text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X)", "+ str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\")", "begins here. This will process all the command line arguments before proceeding. \"\"\"", "True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. ---------------------------------- Can", "None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates != None or templates: if", "os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] +", "data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if window is not None: window.step_progress()", "also generates html report for debugging purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed", "len(args) > 1: temp = Template(args[1]) data = Data(filename, temp) else: data =", "window filetext -- Fills output box given a list of files maketemplate --", "description=textwrap.dedent('''\\ Processes Csv files. ---------------------------------- Can process one or more csv files. Can", "for folder to process and displays the contained files in the output window\"\"\"", "to use as templates in processing exportfile -- file to export analysis to", "removing all files from the process queue and sets progress bar back to", "path. 
\"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process files and templates", "len(filenames) if num_templates == num_files: for i in range(0, num_files): main(filenames[i], templates[i], exporter=export,", "= 0 self.total_errors = 0 self.total_col = 0 if not offline: with open(self.filename,", "kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7] Reading", "print(\"Completed analysis for: \",filename) if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step", "print(\"Completed analysis for: \", filename) if window is not None: window.setstatus(\"Completed Analysis for", "in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error message for", "different number of files and templates\") else: for name in filenames: main(name, exporter=export,", "ttk from threading import Thread try: from .data import * from .report import", "process one or more csv files. 
Can specify template to describe data further.", "purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis for: \",filename) if window is", "name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type: \" + file) if", "text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles def dataaskopenfolder(self):", "str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else:", "export file is not open in another program\") def removefile(self, file, label): \"\"\"Removes", "= Data(filename) if not data.raw_data: print(\"ERROR: Unable to read file: \" + filename)", "'w') as fp: pass def write_stats(self, data): \"\"\"Writes statistics of a single data", "len(sys.argv) > 1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\", "HTML Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\" import argparse import webbrowser import", "range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name", "label = Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file,", "pre-analysis\") if window is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if", "self.removefile(x, m)) for file in files: label = Label(self.display, text=str(\"\\t\" + file), anchor='w')", "Asks for files to process and displays them in the output window\"\"\" self.reset()", "filenames.append(file) else: print(\"ERROR: Unsupported file type: \" + file) if window is not", "file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing 
Files...\") exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w',", "csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names:", "self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" +", "\" is not open in another program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1]", "self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"]", "file) if window is not None: window.setstatus(\"WARNING: Unsupported file type \" + file)", "\" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) + \"\\n\")", "proceeding. \"\"\" files = [] templates = [] if len(sys.argv) > 1: terminal", "Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return", "from template_reader import * terminal = False \"\"\" Global Variables: terminal -- boolean", "print(num_templates) num_files = len(filenames) if num_templates == num_files: for i in range(0, num_files):", "= 0 if not offline: with open(self.filename, 'w') as fp: pass def write_stats(self,", "remove_file = lambda x, m: (lambda p: self.removefile(x, m)) for file in files:", "num_files: for i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO", "= max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that", "os.path.splitext(file) # TODO handle empty sheets if name_ext[1] == '.xls' or name_ext[1] ==", "= os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\") try: with open(new_name, 
'w', newline='')", "= filedialog.askdirectory() if folder != '': self.datafiles = [] for file in os.listdir(folder):", "sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\") try:", "import * terminal = False \"\"\" Global Variables: terminal -- boolean value whether", "data.raw_data: print(\"ERROR: Unable to read file: \" + filename) window.setstatus(\"ERROR: Unable to read", "folder = filedialog.askdirectory() if folder != '': self.datafiles = [] for file in", "open(self.filename, 'r') as fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def", "displays them in the output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \"", "\"_\" + sheet + \".csv\") try: with open(new_name, 'w', newline='') as fp: wr", "for file in files: name_ext = os.path.splitext(file) # TODO handle empty sheets if", "import * except: from data import * from report import * from template_reader", "in data.columns if column.empty] fp.write(\"Number of Empty Columns: \" + str(len(empty_columns)) + '\\n')", "empty sheets if name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting", "-- file to export analysis to if applicable \"\"\" filenames = [] excel", "or through GUI progress -- Progress bar showing progress through program \"\"\" class", "generates html report for debugging purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis", "templates, exportfile='', window=None): \"\"\"Process files and templates and runs the program over them.", "Report(data) str_report = report.html_report() html = report.gen_html(str_report) # returns string of html, also", "report at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start()", "import * from report import * from template_reader 
import * terminal = False", "fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number of Invalid", "directory of the file with the file name Keyword arguments: location -- A", "report\") report = Report(data) str_report = report.html_report() html = report.gen_html(str_report) # returns string", "0 self.total_empty = 0 self.total_errors = 0 self.total_col = 0 if not offline:", "message for files not processed fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis", "all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp: fp.write(\"Error", "label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to create template page", "== '.xlsx': print(\"[Step 0/7] Converting to csv file\") wb = xlrd.open_workbook(file) sheet_names =", "Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew')", "export file is open self.setstatus(\"ERROR: Permission Denied, ensure export file is not open", "def templateaskopenfile(self): \"\"\"Asks for template to use in processing\"\"\" self.template = [] template", "output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles", "program \"\"\" class DisplayWindow: \"\"\"GUI for application allowing users to interact with program", "to read file: \" + filename) if exporter is not None: exporter.write_error(data) return", "a file containing analysis of all files run in program Methods: write_stats --", "readable data detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create Data and Report objects, providing", "''')) parser.add_argument('filenames', nargs='+',\\ 
help='one or more filenames for the processor to analyse') parser.add_argument('-t',", "\".csv\" with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum in", "for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles)", "Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\")", "-- list of datafiles to be processed display -- output window Frame object", "processing all files Variables: filename -- file name to save export file as", "or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to csv file\") wb = xlrd.open_workbook(file)", "self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for template to use in processing\"\"\" self.template", "+ str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total", "'r') as fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self,", "argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. 
---------------------------------- Can process one or more csv files.", "window dataaskopenfolder -- Asks for folder to process and displays the contained files", "all files processed process_export -- Runs program and creates a file containing analysis", "multiple sheets print(\"Error, different number of files and templates\") else: for name in", "not offline: with open(self.filename, 'w') as fp: pass def write_stats(self, data): \"\"\"Writes statistics", "multiple templates for multiple files list templates in the same order as the", "\"\"\" Asks for files to process and displays them in the output window\"\"\"", "column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5)", "Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select", "-- Resets the program removing all files from the process queue and sets", "str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns: \" + str(len(data.columns))", "total number of files processed total_invalid -- total number of invalid rows total_empty", "and generates report at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles,", "the contained files in the output window filetext -- Fills output box given", "location -- A file path. 
\"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None):", "\"\"\"If the program is run with application.py as the argument to the command", "self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks", "terminal = True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files.", "Valid Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) +", "+ filename + \"...\") if len(args) > 1: temp = Template(args[1]) data =", "= self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N)", "1: temp = Template(args[1]) data = Data(filename, temp) else: data = Data(filename) if", "(lambda p: self.removefile(x, m)) for file in files: label = Label(self.display, text=str(\"\\t\" +", "to read file: \" + filename) window.setstatus(\"ERROR: Unable to read file: \" +", "else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\"", "Csv files. ---------------------------------- Can process one or more csv files. 
Can specify template", "window.setstatus(\"WARNING: Unsupported file type \" + file) if exportfile != '': export =", "+ str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as fd: for line in fd:", "run with application.py as the argument to the command line execution begins here.", "fp.write(\"Total Empty Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \" +", "process_export(self): \"\"\"Runs program and exports results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\")", "invalid rows total_empty -- total number of empty columns total_errors -- total numher", "exportfile='', window=None): \"\"\"Process files and templates and runs the program over them. Converts", "all the command line arguments before proceeding. \"\"\" files = [] templates =", "filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is not None:", "p: self.removefile(x, m)) for file in files: label = Label(self.display, text=str(\"\\t\" + file),", "further. Templates can be used to describe one or more csv files. 
If", "num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep functionality when excel files", "not data.raw_data: print(\"ERROR: Unable to read file: \" + filename) window.setstatus(\"ERROR: Unable to", "self.statusText.set(\"Ready to Process Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder to process", "Cells: \" + str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns:", "column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew',", "+ '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all files processed\"\"\" temp_file =", "'\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\")", "\"\"\"Writes summary of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w')", "fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes", "'\\n') fp.write(\"ERROR: Unable to read file, no readable data detected.\\n\\n\") def main(*args, **kwargs):", "+ os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number of Invalid rows: \" +", "msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a file containing analysis of all", "-- total number of invalid rows total_empty -- total number of empty columns", "created csv file already exists and is open window.setstatus(\"ERROR: Permission Denied, ensure \"", "file name to save export file as total_files -- total number of files", "None: window.setstatus(\"WARNING: Unsupported file type \" + file) if exportfile != 
'': export", "folder to process and displays the contained files in the output window\"\"\" self.reset()", "keep functionality when excel files have multiple sheets print(\"Error, different number of files", "self.total_empty = 0 self.total_errors = 0 self.total_col = 0 if not offline: with", "'w') as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed:", "rows total_empty -- total number of empty columns total_errors -- total numher of", "window is not None: window.step_progress() window.setstatus(\"Running Analysis on \" + filename + \"...\")", "can be used to describe one or more csv files. If using multiple", "files. If using multiple templates for multiple files list templates in the same", "column=3, rowspan=7, sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status", "= filename self.total_files = 0 self.total_invalid = 0 self.total_empty = 0 self.total_errors =", "to read file, no readable data detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create Data", "remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to create template page on", "os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error message for files not processed", "output window dataaskopenfolder -- Asks for folder to process and displays the contained", "template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template", "files Variables: filename -- file name to save export file as total_files --", "+ \"\\n\\n\") with open(self.filename, 'r') as fd: for line in fd: fp.write(line) os.remove(self.filename)", "ensure export file is not open in another program\") def removefile(self, file, label):", "len(data.invalid_rows) empty_columns = [column.header for column in 
data.columns if column.empty] fp.write(\"Number of Empty", "\"\"\" filenames = [] excel = [] for file in files: name_ext =", "for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" +", "+ self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template def setmaxprogress(self, max):", "num_files = len(filenames) if num_templates == num_files: for i in range(0, num_files): main(filenames[i],", "+ \".csv\" with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum", "Files', '*.csv')], defaultextension=\"*.csv\") if template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy()", "\"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process files and templates and", "If using multiple templates for multiple files list templates in the same order", "status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files to process and displays", "program Methods: write_stats -- writes summary of a single data object write_summary --", "str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column in data.columns", "If created csv file already exists and is open window.setstatus(\"ERROR: Permission Denied, ensure", "class DisplayWindow: \"\"\"GUI for application allowing users to interact with program in simpler", "text=\"Select File(s) or Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional):", "report for all files processed process_export -- Runs program and creates a file", "fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files to process and displays them", "templateaskopenfile -- Asks for a template to use during processing and displays it", 
"except PermissionError: # Occurs if export file is open self.setstatus(\"ERROR: Permission Denied, ensure", "as the files they correspond to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more filenames", "pady=10) label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse", "be processed templates -- files to use as templates in processing exportfile --", "= wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', newline='') as fp:", "-- files to be processed templates -- files to use as templates in", "anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files to process and", "+ self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'),", "fp: pass def write_stats(self, data): \"\"\"Writes statistics of a single data object\"\"\" with", "Variables: filename -- file name to save export file as total_files -- total", "Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) +", "+ str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total", "'\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column in data.columns if column.empty]", "in processing exportfile -- file to export analysis to if applicable \"\"\" filenames", "window is not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if window is", "data = Data(filename) if not data.raw_data: print(\"ERROR: Unable to read file: \" +", "for files to process and displays them in the output window dataaskopenfolder --", "is not None: Label(self.display, text=str(\"Template Selected: \" + 
self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory()", "file). Keyword Arguments: args -- Arguments provided to the program at runtime. exporter", "of \" + os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number of Invalid rows:", "-- Asks for a template to use during processing and displays it in", "a list of files maketemplate -- Links to Create template web page of", "__init__(self, filename, offline=True): self.filename = filename self.total_files = 0 self.total_invalid = 0 self.total_empty", "if self.datafiles is not None: self.datafiles = [file.name for file in self.datafiles] Label(self.display,", "= report.gen_html(str_report) # returns string of html, also generates html report for debugging", "for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file already", "GUI interface and exporting class that creates files instead of generating HTML Reports", "open in another program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file)", "argument to the command line execution begins here. 
This will process all the", "filename = args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7] Reading data\") if", "import webbrowser import textwrap import xlrd from tkinter import * from tkinter import", "'\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns: \" + str(len(data.columns)) + '\\n')", "the contained files in the output window\"\"\" self.reset() if self.template is not None:", "users to interact with program in simpler and more explanatory way Methods: dataaskopenfile", "anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '': self.datafiles = [] for file", "print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7] Reading data\") if window is not", "window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if window is not None: window.step_progress() window.setstatus(\"Running", "= Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template = None # Main Window", "+ folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides text for output", "print(\"[Step 3/7] Running pre-analysis\") if window is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7]", "= lambda x, m: (lambda p: self.removefile(x, m)) for file in files: label", "fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] +", "with open(self.filename, 'w') as fp: pass def write_stats(self, data): \"\"\"Writes statistics of a", "page of Data-oracle website process_report -- Runs program and generates report for all", "return folder def filetext(self, files): \"\"\"Provides text for output box given a list", "\").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow,", "Process Files...\") return self.datafiles def dataaskopenfolder(self): 
\"\"\"Asks for folder to process and displays", "filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window':", "text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1,", "label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to create template page on Data-oracle", "+ \"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns:", "Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if window is not None:", "and create desired outputs (i.e. HTML report or writing to exported file). Keyword", "Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles", "None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window is not None: window.step_progress()", "website process_report -- Runs program and generates report for all files processed process_export", "to be processed templates -- files to use as templates in processing exportfile", "TODO handle empty sheets if name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print(\"[Step", "exporter=export, window=window) else: # TODO keep functionality when excel files have multiple sheets", "files and templates\") else: for name in filenames: main(name, exporter=export, window=window) if export", "\" + str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns: \"", "try: with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum in", "filedialog, ttk from threading import Thread try: from .data import * from .report", "no readable data detected.\\n\\n\") def 
main(*args, **kwargs): \"\"\" Create Data and Report objects,", "provided to the program at runtime. exporter -- Exporter object if applicable \"\"\"", "\" + str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter:", "return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if window is not", "Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template = None # Main Window mainwindow", "len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name,", "'\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells: \" + str(len(data.errors)) + '\\n')", "file Keyword arguments: files -- files to be processed templates -- files to", "os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read file, no readable data detected.\\n\\n\") def", "os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\") try: with open(new_name, 'w', newline='') as", "# returns string of html, also generates html report for debugging purposes print(\"[Step", "not None: window.setstatus(\"Completed Analysis for \" + filename) def get_file_dir(location): \"\"\"Returns the directory", "handle empty sheets if name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7]", "= 0 self.total_empty = 0 self.total_errors = 0 self.total_col = 0 if not", "root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template = None # Main Window mainwindow =", "Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0,", "through GUI progress -- Progress bar showing progress through program \"\"\" class DisplayWindow:", "sticky='ew') Button(mainwindow, text='Browse Folders...', command= 
self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1,", "Template(args[1]) data = Data(filename, temp) else: data = Data(filename) if not data.raw_data: print(\"ERROR:", "status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\"", "if applicable \"\"\" exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename =", "# TODO keep functionality when excel files have multiple sheets print(\"Error, different number", "Successfully Generated\") print(\"Completed analysis for: \",filename) if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2)", "= csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in", "command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10,", "to the command line execution begins here. 
This will process all the command", "+ os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read file, no readable data detected.\\n\\n\")", "class Exporter(object): \"\"\"Class that creates a file containing analysis of all files run", "files from the process queue and sets progress bar back to the start", "or more filenames for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template", "total numher of errors throughout files \"\"\" def __init__(self, filename, offline=True): self.filename =", "for folder to process and displays the contained files in the output window", "processed display -- output window Frame object template -- template to use in", "if len(sys.argv) > 1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\", "the program is run with application.py as the argument to the command line", "print(\"[Step 5/7] Running Analysis\") if window is not None: window.step_progress() window.setstatus(\"Running Analysis on", "the same order as the files they correspond to. 
''')) parser.add_argument('filenames', nargs='+',\\ help='one", "if not offline: with open(self.filename, 'w') as fp: pass def write_stats(self, data): \"\"\"Writes", "text=str(\"\\t\" + file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\",", "+ \"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns:", "rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for", "self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self,", "if len(args) > 1: temp = Template(args[1]) data = Data(filename, temp) else: data", "pass def write_stats(self, data): \"\"\"Writes statistics of a single data object\"\"\" with open(self.filename,", "window.setstatus(\"Completed Analysis for \" + filename) def get_file_dir(location): \"\"\"Returns the directory of the", "analysis for: \",filename) if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7]", "start templateaskopenfile -- Asks for a template to use during processing and displays", "report for debugging purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis for: \",filename)", "textwrap import xlrd from tkinter import * from tkinter import filedialog, ttk from", "nargs='+',\\ help='one or more filenames for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template',", "+ \" is not open in another program\") return None filenames.append(new_name) excel.append(new_name) elif", "* terminal = False \"\"\" Global Variables: terminal -- boolean value whether program", "with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n')", "it in 
the output window Variables: datafiles -- list of datafiles to be", "+ '\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column in data.columns if", "offline=True): self.filename = filename self.total_files = 0 self.total_invalid = 0 self.total_empty = 0", "in the output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]),", "Errors: \" + str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as fd: for line", "number of files processed total_invalid -- total number of invalid rows total_empty --", "\" + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '': self.datafiles =", "-- total number of files processed total_invalid -- total number of invalid rows", "file already exists and is open window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name", "__name__ == '__main__': \"\"\"If the program is run with application.py as the argument", "None: window.setstatus(\"Completed Analysis for \" + filename) def get_file_dir(location): \"\"\"Returns the directory of", "file to export analysis to if applicable \"\"\" filenames = [] excel =", "all files from the process queue and sets progress bar back to the", "total_invalid -- total number of invalid rows total_empty -- total number of empty", "creates files instead of generating HTML Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\"", "pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() #", "detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create Data and Report objects, providing necessary information", "None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if window is not None: window.step_progress()", "filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv 
Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\")", "wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', newline='') as fp: wr", "file name Keyword arguments: location -- A file path. \"\"\" return location.rpartition('\\\\') def", "filename) window.setstatus(\"ERROR: Unable to read file: \" + filename) if exporter is not", "0/7] Converting to csv file\") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names)", "excel files have multiple sheets print(\"Error, different number of files and templates\") else:", "+ \"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \"", "that creates files instead of generating HTML Reports Author: <NAME> Last Updated: 28/02/2017", "is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window is not", "exporting class that creates files instead of generating HTML Reports Author: <NAME> Last", "'*.csv')], defaultextension=\"*.csv\") if template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel", "self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a file containing", "file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow =", "1/7] Processing file: \",filename) print(\"[Step 2/7] Reading data\") if window is not None:", "3/7] Running pre-analysis\") if window is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding", "filenames: main(name, exporter=export, window=window) if export != None: export.write_summary() if excel: for file", "Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \" + 
str(self.total_files) +", "in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates) num_files =", "single data object write_summary -- writes summary of all files to be run", "templates\") else: for name in filenames: main(name, exporter=export, window=window) if export != None:", "to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more filenames for the processor to analyse')", "= Report(data) str_report = report.html_report() html = report.gen_html(str_report) # returns string of html,", "= filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')],", "window is not None: window.setstatus(\"WARNING: Unsupported file type \" + file) if exportfile", "Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s) to process: \").grid(row=0, sticky=E,", "csv files. Can specify template to describe data further. 
Templates can be used", "terminal = False \"\"\" Global Variables: terminal -- boolean value whether program is", "if exporter is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7]", "processed total_invalid -- total number of invalid rows total_empty -- total number of", "to process and displays them in the output window dataaskopenfolder -- Asks for", "removefile(self, file, label): \"\"\"Removes file from process list and removes label\"\"\" print(\"Removing: \",", "= 0 self.setstatus(\"Processing Files...\") exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv", "running through terminal or through GUI progress -- Progress bar showing progress through", "+ self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '': self.datafiles = []", "\"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow)", "window.step_progress() print(\"Completed analysis for: \", filename) if window is not None: window.setstatus(\"Completed Analysis", "Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates report at the end\"\"\"", "for File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for template to use in", "data.find_errors() print(\"[Step 5/7] Running Analysis\") if window is not None: window.step_progress() window.setstatus(\"Running Analysis", "program and generates report at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files,", "templates: if len(templates) == 1: for name in filenames: main(name, templates[0], exporter=export, window=window)", "webbrowser to create template page on Data-oracle 
website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program", "max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object):", "applicable \"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template", "displays it in the output window Variables: datafiles -- list of datafiles to", "to process and displays them in the output window\"\"\" self.reset() if self.template: Label(self.display,", "None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \" +", "is not None: window.step_progress() window.setstatus(\"Running Analysis on \" + filename + \"...\") data.analysis()", "or writing to exported file). Keyword Arguments: args -- Arguments provided to the", "fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If", "command line execution begins here. 
This will process all the command line arguments", "self.filename = filename self.total_files = 0 self.total_invalid = 0 self.total_empty = 0 self.total_errors", "open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') self.total_files", "bar back to the start templateaskopenfile -- Asks for a template to use", "Report Successfully Generated\") print(\"Completed analysis for: \",filename) if window is not None: window.step_progress()", "return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file", "and more explanatory way Methods: dataaskopenfile -- Asks for files to process and", "creates a file containing analysis of all files run in program Methods: write_stats", "Generated\") if window is not None: window.step_progress() print(\"Completed analysis for: \", filename) if", "of generating HTML Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\" import argparse import", "fp.write(\"Number of Valid Columns: \" + str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if", "Converting to csv file\") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) ==", "\" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col) + \"\\n\")", "of files processed total_invalid -- total number of invalid rows total_empty -- total", "= [] excel = [] for file in files: name_ext = os.path.splitext(file) #", "Successfully Generated\") if window is not None: window.step_progress() print(\"Completed analysis for: \", filename)", "exportfile.name, 'window': self}).start() except PermissionError: # Occurs if export file is open self.setstatus(\"ERROR:", "Create template web page of Data-oracle website process_report -- Runs program and generates", "arguments: location -- A file path. 
\"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='',", "-- boolean value whether program is running through terminal or through GUI progress", "files: name_ext = os.path.splitext(file) # TODO handle empty sheets if name_ext[1] == '.xls'", "= [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder),", "file in files: name_ext = os.path.splitext(file) # TODO handle empty sheets if name_ext[1]", "Methods: dataaskopenfile -- Asks for files to process and displays them in the", "fp.write(\"Number of Error Cells: \" + str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number", "of all files to be run after processing all files Variables: filename --", "to use during processing and displays it in the output window Variables: datafiles", "datafiles -- list of datafiles to be processed display -- output window Frame", "else: export = None if window is not None: window.setmaxprogress(len(filenames) * 5.0 +", "\", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for", "if export != None: export.write_summary() if excel: for file in excel: os.remove(file) if", "Resets the program removing all files from the process queue and sets progress", "Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is not None: self.template.append(template.name) if", "processed after being selected in output window reset -- Resets the program removing", "and exports results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile = ''", "on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates report at the", "anchor='w').pack(fill=X) 
self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder", "# Occurs if export file is open self.setstatus(\"ERROR: Permission Denied, ensure export file", "\" + filename + \"...\") data.analysis() if exporter is None: print(\"[Step 6/7] Generating", "the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self):", "object template -- template to use in process if applicable \"\"\" def __init__(self):", "a file containing analysis of all files processed removefile -- Removes file from", "len(data.errors) fp.write(\"Number of Valid Columns: \" + str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns))", "file as total_files -- total number of files processed total_invalid -- total number", "file path. \"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process files and", "= Template(args[1]) data = Data(filename, temp) else: data = Data(filename) if not data.raw_data:", "not None: window.step_progress() window.setstatus(\"Running Analysis on \" + filename + \"...\") data.analysis() if", ".template_reader import * except: from data import * from report import * from", "label): \"\"\"Removes file from process list and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file)", "Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start()", "if window is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates !=", "+= 1 fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid +=", "mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, 
text=\"Select File(s) or Folder(s) to process:", "Error Cells: \" + str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid", "as fp: pass def write_stats(self, data): \"\"\"Writes statistics of a single data object\"\"\"", "-- total numher of errors throughout files \"\"\" def __init__(self, filename, offline=True): self.filename", "or more csv files. If using multiple templates for multiple files list templates", "anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X)", "Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s) to", "write_stats -- writes summary of a single data object write_summary -- writes summary", "use in process if applicable \"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\")", "csv file\") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh", "'.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is not None: self.template.append(template.name) if hasattr(self,", "files. Can specify template to describe data further. 
Templates can be used to", "for file in files: label = Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name", "'__main__': \"\"\"If the program is run with application.py as the argument to the", "exporter=export, window=window) if export != None: export.write_summary() if excel: for file in excel:", "text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2,", "fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns", "window reset -- Resets the program removing all files from the process queue", "in the output window Variables: datafiles -- list of datafiles to be processed", "None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if window is not None:", "for i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep", "template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is", "of empty columns total_errors -- total numher of errors throughout files \"\"\" def", "of all files processed removefile -- Removes file from being processed after being", "open(self.filename, 'w') as fp: pass def write_stats(self, data): \"\"\"Writes statistics of a single", "for file in excel: os.remove(file) if __name__ == '__main__': \"\"\"If the program is", "= argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. 
---------------------------------- Can process one or more csv", "* from .template_reader import * except: from data import * from report import", "not None: Label(self.display, text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if", "in program Methods: write_stats -- writes summary of a single data object write_summary", "templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates) num_files = len(filenames) if num_templates", "= xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name", "fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number", "column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\")", "window=None): \"\"\"Process files and templates and runs the program over them. 
Converts excel", "file, label): \"\"\"Removes file from process list and removes label\"\"\" print(\"Removing: \", file)", "in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file already exists and", "= kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7]", "errors throughout files \"\"\" def __init__(self, filename, offline=True): self.filename = filename self.total_files =", "= False \"\"\" Global Variables: terminal -- boolean value whether program is running", "or templates: if len(templates) == 1: for name in filenames: main(name, templates[0], exporter=export,", "showing progress through program \"\"\" class DisplayWindow: \"\"\"GUI for application allowing users to", "+ os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total", "filenames: main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates) num_files = len(filenames)", "template to describe data further. 
Templates can be used to describe one or", "rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for template", "save export file as total_files -- total number of files processed total_invalid --", "dataaskopenfile(self): \"\"\" Asks for files to process and displays them in the output", "if window is not None: window.step_progress() window.setstatus(\"Processing \" + filename + \"...\") if", "self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles", "if templates != None or templates: if len(templates) == 1: for name in", "fp.write(\"Number of Empty Columns: \" + str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number", "-- Progress bar showing progress through program \"\"\" class DisplayWindow: \"\"\"GUI for application", "if num_templates == num_files: for i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window)", "\" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template def setmaxprogress(self,", "from report import * from template_reader import * terminal = False \"\"\" Global", "self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column in data.columns if column.empty] fp.write(\"Number", "and displays it in the output window Variables: datafiles -- list of datafiles", "process and displays the contained files in the output window filetext -- Fills", "in filenames: main(name, exporter=export, window=window) if export != None: export.write_summary() if excel: for", "end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): 
\"\"\"Runs", "program removing all files from the process queue and sets progress bar back", "= None # Main Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select", "window is not None: window.step_progress() print(\"Completed analysis for: \", filename) if window is", "\"\"\" Create Data and Report objects, providing necessary information for them to run", "Data-oracle website process_report -- Runs program and generates report for all files processed", "Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1,", "as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: #", "being processed after being selected in output window reset -- Resets the program", "one or more csv files. Can specify template to describe data further. 
Templates", "sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status", "Process Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step()", "setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class", "files have multiple sheets print(\"Error, different number of files and templates\") else: for", "fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error message for files not", "open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum))", "templates for multiple files list templates in the same order as the files", "necessary information for them to run analysis and create desired outputs (i.e. HTML", "new_name + \" is not open in another program\") return None filenames.append(new_name) excel.append(new_name)", "file with the file name Keyword arguments: location -- A file path. \"\"\"", "process if applicable \"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles =", "None # Main Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s)", "def process_export(self): \"\"\"Runs program and exports results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing", "if exportfile != '': export = Exporter(exportfile) else: export = None if window", "Empty Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col)", "list templates in the same order as the files they correspond to. 
'''))", "parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files') args = parser.parse_args() process_files(args.filenames,", "and displays them in the output window dataaskopenfolder -- Asks for folder to", "output window\"\"\" self.reset() if self.template is not None: Label(self.display, text=str(\"Template Selected: \" +", "export = None if window is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01)", "Report Successfully Generated\") if window is not None: window.step_progress() print(\"Completed analysis for: \",", "window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step", "filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is not None: self.template.append(template.name)", "self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to", "* except: from data import * from report import * from template_reader import", "template file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\",", "sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root,", "self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for template to use", "Last Updated: 28/02/2017 \"\"\" import argparse import webbrowser import textwrap import xlrd from", "= 0 self.total_invalid = 0 self.total_empty = 0 self.total_errors = 0 self.total_col =", "\"\"\" files = [] templates = [] if len(sys.argv) > 1: terminal =", "\" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read file, no readable data", "files): \"\"\"Provides text for output 
box given a list of files\"\"\" remove_file =", "Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5,", "for File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def", "= filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is not", "or more csv files. Can specify template to describe data further. Templates can", "at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def", "\" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\") with", "= Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow,", "for debugging purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis for: \",filename) if", "processed templates -- files to use as templates in processing exportfile -- file", "= [] for file in files: name_ext = os.path.splitext(file) # TODO handle empty", "'window': self}).start() except PermissionError: # Occurs if export file is open self.setstatus(\"ERROR: Permission", "with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows):", "mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7,", "sheets print(\"Error, different number of files and templates\") else: for name in filenames:", "the output window dataaskopenfolder -- Asks for folder to process and displays the", "pady=5) mainwindow.pack() # 
Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar self.statusText", "Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5)", "self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N)", "writes summary of all files to be run after processing all files Variables:", "data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\")", "columns total_errors -- total numher of errors throughout files \"\"\" def __init__(self, filename,", "\".csv\") try: with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for rownum", "== '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type: \" + file) if window", "to use in processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'),", "args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports results to file\"\"\" self.progress[\"value\"]", "def __init__(self, filename, offline=True): self.filename = filename self.total_files = 0 self.total_invalid = 0", "output window reset -- Resets the program removing all files from the process", "the program at runtime. 
exporter -- Exporter object if applicable \"\"\" exporter =", "def process_report(self): \"\"\"Runs program and generates report at the end\"\"\" self.progress[\"value\"] = 0", "Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for", "generating HTML Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\" import argparse import webbrowser", "str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty) + \"\\n\") fp.write(\"Total Valid", "all files processed removefile -- Removes file from being processed after being selected", "files list templates in the same order as the files they correspond to.", "files processed removefile -- Removes file from being processed after being selected in", "file in excel: os.remove(file) if __name__ == '__main__': \"\"\"If the program is run", "Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6,", "the program removing all files from the process queue and sets progress bar", "process_report -- Runs program and generates report for all files processed process_export --", "and displays the contained files in the output window filetext -- Fills output", "print(\"[Step 7/7] Report Successfully Generated\") if window is not None: window.step_progress() print(\"Completed analysis", "self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"]", "all files Variables: filename -- file name to save export file as total_files", "0 self.total_errors = 0 self.total_col = 0 if not offline: with open(self.filename, 'w')", "Analysis\") if window is 
not None: window.step_progress() window.setstatus(\"Running Analysis on \" + filename", "= StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM,", "reset -- Resets the program removing all files from the process queue and", "\" + filename) window.setstatus(\"ERROR: Unable to read file: \" + filename) if exporter", "the process queue and sets progress bar back to the start templateaskopenfile --", "28/02/2017 \"\"\" import argparse import webbrowser import textwrap import xlrd from tkinter import", "write_stats(self, data): \"\"\"Writes statistics of a single data object\"\"\" with open(self.filename, 'r+') as", "of the file with the file name Keyword arguments: location -- A file", "'.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type: \" + file) if window is", "in processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files',", "to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3", "program\") def removefile(self, file, label): \"\"\"Removes file from process list and removes label\"\"\"", "os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self,", "Contains GUI interface and exporting class that creates files instead of generating HTML", "metavar='template', help='a template for the given files') args = parser.parse_args() process_files(args.filenames, args.t) else:", "[] if len(sys.argv) > 1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser =", "number of invalid rows total_empty -- total number of empty columns total_errors --", "if window is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window", 
"args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7] Reading data\") if window is", "if window is not None: window.step_progress() print(\"Completed analysis for: \", filename) if window", "parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. ---------------------------------- Can process one or more", "Occurs if export file is open self.setstatus(\"ERROR: Permission Denied, ensure export file is", "runs the program over them. Converts excel files and applies template to each", "folder def filetext(self, files): \"\"\"Provides text for output box given a list of", "label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to create template page on Data-oracle website\"\"\"", "+ file) if exportfile != '': export = Exporter(exportfile) else: export = None", "\" + str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as fd: for line in", "window.step_progress() window.setstatus(\"Running Analysis on \" + filename + \"...\") data.analysis() if exporter is", "name in filenames: main(name, exporter=export, window=window) if export != None: export.write_summary() if excel:", "Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not", "for output box given a list of files\"\"\" remove_file = lambda x, m:", "data.clean() print(\"[Step 3/7] Running pre-analysis\") if window is not None: window.step_progress() data.pre_analysis() print(\"[Step", "have multiple sheets print(\"Error, different number of files and templates\") else: for name", "help='a template for the given files') args = parser.parse_args() process_files(args.filenames, args.t) else: DisplayWindow()", "newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name)", "files \"\"\" def __init__(self, 
filename, offline=True): self.filename = filename self.total_files = 0 self.total_invalid", "self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs if export file is", "list of datafiles to be processed display -- output window Frame object template", "list of files\"\"\" remove_file = lambda x, m: (lambda p: self.removefile(x, m)) for", "for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh", "window Variables: datafiles -- list of datafiles to be processed display -- output", "explanatory way Methods: dataaskopenfile -- Asks for files to process and displays them", "error message for files not processed fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2)", "-- Exporter object if applicable \"\"\" exporter = kwargs.pop('exporter', None) window = kwargs.pop('window',", "is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\")", "wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created", "be used to describe one or more csv files. 
If using multiple templates", "file from being processed after being selected in output window reset -- Resets", "temp) else: data = Data(filename) if not data.raw_data: print(\"ERROR: Unable to read file:", "file, no readable data detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create Data and Report", "containing analysis of all files run in program Methods: write_stats -- writes summary", "if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def", "fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow,", "sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress", "exports results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile = '' try:", "else: num_templates = len(templates) print(num_templates) num_files = len(filenames) if num_templates == num_files: for", "Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs if export", "process queue and sets progress bar back to the start templateaskopenfile -- Asks", "the output window\"\"\" self.reset() if self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X)", "file in self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\")", "\"\"\" import argparse import webbrowser import textwrap import xlrd from tkinter import *", "sticky=N) self.setstatus(\"Waiting for File...\") 
self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for template to", "args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs if export file", "\" + os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number of Invalid rows: \"", "in the output window dataaskopenfolder -- Asks for folder to process and displays", "output window filetext -- Fills output box given a list of files maketemplate", "analysis for: \", filename) if window is not None: window.setstatus(\"Completed Analysis for \"", "object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] +", "self.template: Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files',", "= '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')])", "self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow)", "window.setstatus(\"Processing \" + filename + \"...\") if len(args) > 1: temp = Template(args[1])", "StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X)", "in the output window filetext -- Fills output box given a list of", "self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel", "Runs program and creates a file containing analysis of all files processed removefile", "Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], 
defaultextension=\"*.csv\") if self.datafiles", "output window Variables: datafiles -- list of datafiles to be processed display --", "self.datafiles is not None: self.datafiles = [file.name for file in self.datafiles] Label(self.display, text=\"Selected", "Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template def", "to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile = '' try: exportfile =", "if column.empty] fp.write(\"Number of Empty Columns: \" + str(len(empty_columns)) + '\\n') self.total_empty =", "them to run analysis and create desired outputs (i.e. HTML report or writing", "empty_columns = [column.header for column in data.columns if column.empty] fp.write(\"Number of Empty Columns:", "fp.write(\"Total Valid Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors)", "\"\"\"Process files and templates and runs the program over them. Converts excel files", "\"...\") data.analysis() if exporter is None: print(\"[Step 6/7] Generating report\") report = Report(data)", "-- Asks for files to process and displays them in the output window", "Arguments provided to the program at runtime. 
exporter -- Exporter object if applicable", "-- files to use as templates in processing exportfile -- file to export", "def reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display", "during processing and displays it in the output window Variables: datafiles -- list", "\"\"\"Runs program and exports results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile", "of files\"\"\" remove_file = lambda x, m: (lambda p: self.removefile(x, m)) for file", "text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow,", "being selected in output window reset -- Resets the program removing all files", "= Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label))", "-- Runs program and creates a file containing analysis of all files processed", "more csv files. Can specify template to describe data further. 
Templates can be", "if window is not None: window.step_progress() window.setstatus(\"Running Analysis on \" + filename +", "\", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow", "= 0 self.total_col = 0 if not offline: with open(self.filename, 'w') as fp:", "exportfile != '': export = Exporter(exportfile) else: export = None if window is", "is not open in another program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] ==", "Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN,", "text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '':", "PermissionError: # If created csv file already exists and is open window.setstatus(\"ERROR: Permission", "file: \" + filename) if exporter is not None: exporter.write_error(data) return None data.remove_invalid()", "\"\"\"Runs program and generates report at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\")", "0 self.setstatus(\"Processing Files...\") exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files',", "wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet", "# TODO handle empty sheets if name_ext[1] == '.xls' or name_ext[1] == '.xlsx':", "value whether program is running through terminal or through GUI progress -- Progress", "template to each file Keyword arguments: files -- files to be processed templates", "= 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and", "import * from .report import * 
from .template_reader import * except: from data", "process and displays the contained files in the output window\"\"\" self.reset() if self.template", "exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles,", "report or writing to exported file). Keyword Arguments: args -- Arguments provided to", "for a template to use during processing and displays it in the output", "Analysis on \" + filename + \"...\") data.analysis() if exporter is None: print(\"[Step", "m)) for file in files: label = Label(self.display, text=str(\"\\t\" + file), anchor='w') if", "is open self.setstatus(\"ERROR: Permission Denied, ensure export file is not open in another", "Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports results to file\"\"\"", "write_summary -- writes summary of all files to be run after processing all", "sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\">", "elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type: \" + file)", "text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew',", "whether program is running through terminal or through GUI progress -- Progress bar", "+ str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total", "window=window) else: # TODO keep functionality when excel files have multiple sheets print(\"Error,", "Can specify template to describe data further. Templates can be used to describe", "execution body for program. 
Contains GUI interface and exporting class that creates files", "file is open self.setstatus(\"ERROR: Permission Denied, ensure export file is not open in", "way Methods: dataaskopenfile -- Asks for files to process and displays them in", "comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary", "defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name,", "boolean value whether program is running through terminal or through GUI progress --", "data further. Templates can be used to describe one or more csv files.", "data.analysis() if exporter is None: print(\"[Step 6/7] Generating report\") report = Report(data) str_report", "range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv file already exists and is", "Errors\") if window is not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if", "Runs program and generates report for all files processed process_export -- Runs program", "program over them. 
Converts excel files and applies template to each file Keyword", "is not None: window.step_progress() data.find_errors() print(\"[Step 5/7] Running Analysis\") if window is not", "__init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template = None #", "= filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template),", "all files run in program Methods: write_stats -- writes summary of a single", "return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder to process and displays the contained", "offline: with open(self.filename, 'w') as fp: pass def write_stats(self, data): \"\"\"Writes statistics of", "anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] =", "---------------------------------- Can process one or more csv files. Can specify template to describe", "Permission Denied, ensure \" + new_name + \" is not open in another", "as the argument to the command line execution begins here. This will process", "name Keyword arguments: location -- A file path. \"\"\" return location.rpartition('\\\\') def process_files(files,", "self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...',", "Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty) +", "== num_files: for i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: #", "them. 
Converts excel files and applies template to each file Keyword arguments: files", "in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep functionality when", "use during processing and displays it in the output window Variables: datafiles --", "if excel: for file in excel: os.remove(file) if __name__ == '__main__': \"\"\"If the", "and exporting class that creates files instead of generating HTML Reports Author: <NAME>", "\"\"\" def __init__(self, filename, offline=True): self.filename = filename self.total_files = 0 self.total_invalid =", "remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to", "applicable \"\"\" exporter = kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename = args[0]", "for application allowing users to interact with program in simpler and more explanatory", "to export analysis to if applicable \"\"\" filenames = [] excel = []", "analysis of all files processed removefile -- Removes file from being processed after", "command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\",", "csv files. If using multiple templates for multiple files list templates in the", "use as templates in processing exportfile -- file to export analysis to if", "\" + file) if exportfile != '': export = Exporter(exportfile) else: export =", "folder != '': self.datafiles = [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected", "the command line arguments before proceeding. 
\"\"\" files = [] templates = []", "output box given a list of files maketemplate -- Links to Create template", "is None: print(\"[Step 6/7] Generating report\") report = Report(data) str_report = report.html_report() html", "A file path. \"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process files", "Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: #", "to describe data further. Templates can be used to describe one or more", "'': export = Exporter(exportfile) else: export = None if window is not None:", "generates report at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template),", "Exporter(object): \"\"\"Class that creates a file containing analysis of all files run in", "None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully", "print(\"ERROR: Unable to read file: \" + filename) window.setstatus(\"ERROR: Unable to read file:", "temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp: fp.write(\"Error Report \" +", "analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files') args = parser.parse_args()", "command line arguments before proceeding. 
\"\"\" files = [] templates = [] if", "for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error", "the directory of the file with the file name Keyword arguments: location --", "with open(self.filename, 'r') as fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename)", "Files Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid)", "to interact with program in simpler and more explanatory way Methods: dataaskopenfile --", "+ file) if window is not None: window.setstatus(\"WARNING: Unsupported file type \" +", "application allowing users to interact with program in simpler and more explanatory way", "of Invalid rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns =", "= os.path.splitext(file) # TODO handle empty sheets if name_ext[1] == '.xls' or name_ext[1]", "if window is not None: window.setstatus(\"Completed Analysis for \" + filename) def get_file_dir(location):", "str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells: \" + str(len(data.errors))", "Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText, bd=1,", "* from tkinter import filedialog, ttk from threading import Thread try: from .data", "Unsupported file type: \" + file) if window is not None: window.setstatus(\"WARNING: Unsupported", "information for them to run analysis and create desired outputs (i.e. 
HTML report", "creates a file containing analysis of all files processed removefile -- Removes file", "or Folder(s) to process: \").grid(row=0, sticky=E, pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E,", "else: # TODO keep functionality when excel files have multiple sheets print(\"Error, different", "command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew')", "not None: self.datafiles = [file.name for file in self.datafiles] Label(self.display, text=\"Selected Files: \",", "for them to run analysis and create desired outputs (i.e. HTML report or", "6/7] Generating report\") report = Report(data) str_report = report.html_report() html = report.gen_html(str_report) #", "main(*args, **kwargs): \"\"\" Create Data and Report objects, providing necessary information for them", "of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp:", "pady=10) Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create", "file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file, label))", "'w', newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except", "def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template = None", "[file.name for file in self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to", "of Error Cells: \" + str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number of", 
"excel.append(new_name) else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] +", "is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates != None or", "if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data)", "from tkinter import filedialog, ttk from threading import Thread try: from .data import", "list of files maketemplate -- Links to Create template web page of Data-oracle", "file is not open in another program\") def removefile(self, file, label): \"\"\"Removes file", "-- Asks for folder to process and displays the contained files in the", "not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report", "command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew',", "more explanatory way Methods: dataaskopenfile -- Asks for files to process and displays", "single data object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" +", "print(\"[Step 2/7] Reading data\") if window is not None: window.step_progress() window.setstatus(\"Processing \" +", "'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready", "+ sheet + \".csv\") try: with open(new_name, 'w', newline='') as fp: wr =", "files: label = Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\",", "dataaskopenfile -- Asks for files to process and displays them in the output", "not None: 
window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates != None or templates:", "with application.py as the argument to the command line execution begins here. This", "Processing file: \",filename) print(\"[Step 2/7] Reading data\") if window is not None: window.step_progress()", "self.datafiles = [] self.template = None # Main Window mainwindow = Frame(root) self.display", "order as the files they correspond to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more", "('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template is not None: self.template.append(template.name) if hasattr(self, 'templateLabel'):", "for name in filenames: main(name, exporter=export, window=window) if export != None: export.write_summary() if", "not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window is not None:", "\"\"\"Returns the directory of the file with the file name Keyword arguments: location", "fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all", "to save export file as total_files -- total number of files processed total_invalid", "len(templates) print(num_templates) num_files = len(filenames) if num_templates == num_files: for i in range(0,", "newline='') as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError:", "a template to use during processing and displays it in the output window", "total number of empty columns total_errors -- total numher of errors throughout files", "arguments: files -- files to be processed templates -- files to use as", "data\") if window is not None: window.step_progress() window.setstatus(\"Processing \" + filename + \"...\")", "x, m: (lambda p: self.removefile(x, m)) for file in files: label = Label(self.display,", "self.total_col = 0 if not offline: with open(self.filename, 'w') as fp: pass def", "data): \"\"\"Writes error message for 
files not processed fully\"\"\" with open(self.filename, 'r+') as", "run after processing all files Variables: filename -- file name to save export", "str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as fd: for line in fd: fp.write(line)", "the program over them. Converts excel files and applies template to each file", "window.setstatus(\"ERROR: Unable to read file: \" + filename) if exporter is not None:", "hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X)", "to use in process if applicable \"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA", "self.total_invalid = 0 self.total_empty = 0 self.total_errors = 0 self.total_col = 0 if", "6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if window is not", "process_report(self): \"\"\"Runs program and generates report at the end\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing", "new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\") try: with open(new_name, 'w',", "= report.html_report() html = report.gen_html(str_report) # returns string of html, also generates html", "create template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates", "Create Data and Report objects, providing necessary information for them to run analysis", "Files...\") exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All", "if applicable \"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = []", "+ file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: label.bind(\"<Button-3>\", remove_file(file,", "Invalid 
rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header", "+ '\\n') self.total_files += 1 fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows)) +", "if self.template is not None: Label(self.display, text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X) folder", "7/7] Report Successfully Generated\") print(\"Completed analysis for: \",filename) if window is not None:", "anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides text for output box given", "\" + filename) if exporter is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns()", "statistics of a single data object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis", "for files to process and displays them in the output window\"\"\" self.reset() if", "self}).start() except PermissionError: # Occurs if export file is open self.setstatus(\"ERROR: Permission Denied,", "== 1: for name in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates =", "self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \" + self.template[0]),", "total number of invalid rows total_empty -- total number of empty columns total_errors", "+ \"...\") data.analysis() if exporter is None: print(\"[Step 6/7] Generating report\") report =", "Data and Report objects, providing necessary information for them to run analysis and", "name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to csv file\") wb = xlrd.open_workbook(file) sheet_names", "queue and sets progress bar back to the start templateaskopenfile -- Asks for", "summary of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as", "\"\"\" 
Global Variables: terminal -- boolean value whether program is running through terminal", "exporter is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running", "= os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp)", "through program \"\"\" class DisplayWindow: \"\"\"GUI for application allowing users to interact with", "self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting", "of all files run in program Methods: write_stats -- writes summary of a", "-- total number of empty columns total_errors -- total numher of errors throughout", "for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given", "progress through program \"\"\" class DisplayWindow: \"\"\"GUI for application allowing users to interact", "window.step_progress() window.setstatus(\"Processing \" + filename + \"...\") if len(args) > 1: temp =", "def removefile(self, file, label): \"\"\"Removes file from process list and removes label\"\"\" print(\"Removing:", "return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def setstatus(self,", "lambda x, m: (lambda p: self.removefile(x, m)) for file in files: label =", "if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w')", "fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read", "as templates in processing exportfile -- file to export analysis to if applicable", "self.setstatus(\"Processing Files...\") exportfile = '' try: exportfile = 
filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'),", "report import * from template_reader import * terminal = False \"\"\" Global Variables:", "report = Report(data) str_report = report.html_report() html = report.gen_html(str_report) # returns string of", "\" + str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells: \"", "0.01) if templates != None or templates: if len(templates) == 1: for name", "self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template def setmaxprogress(self, max): self.progress[\"maximum\"] = max", "in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet)", "file: \",filename) print(\"[Step 2/7] Reading data\") if window is not None: window.step_progress() window.setstatus(\"Processing", "processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files') args", "from .report import * from .template_reader import * except: from data import *", "processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')],", "= [file.name for file in self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready", "in excel: os.remove(file) if __name__ == '__main__': \"\"\"If the program is run with", "files to be run after processing all files Variables: filename -- file name", "self.template is not None: Label(self.display, text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X) folder =", "[] for file in files: name_ext = os.path.splitext(file) # TODO handle empty sheets", "import * from template_reader import * terminal = False \"\"\" Global Variables: terminal", "Columns: \" + 
str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells:", "desired outputs (i.e. HTML report or writing to exported file). Keyword Arguments: args", "as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to", "Exporter(exportfile) else: export = None if window is not None: window.setmaxprogress(len(filenames) * 5.0", "the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the given files')", "contained files in the output window\"\"\" self.reset() if self.template is not None: Label(self.display,", "temp = Template(args[1]) data = Data(filename, temp) else: data = Data(filename) if not", "type: \" + file) if window is not None: window.setstatus(\"WARNING: Unsupported file type", "Selected: \" + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '': self.datafiles", "sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5,", "wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\"", "processed process_export -- Runs program and creates a file containing analysis of all", "of a single data object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of", "Empty Columns: \" + str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error", "text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\",", "False \"\"\" Global Variables: terminal -- boolean value whether program is running 
through", "Valid Columns: \" + str(len(data.columns)) + '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type ==", "the files they correspond to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more filenames for", "the argument to the command line execution begins here. This will process all", "'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') self.total_files +=", "else: data = Data(filename) if not data.raw_data: print(\"ERROR: Unable to read file: \"", "describe data further. Templates can be used to describe one or more csv", "sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0]", "removefile -- Removes file from being processed after being selected in output window", "import xlrd from tkinter import * from tkinter import filedialog, ttk from threading", "* from report import * from template_reader import * terminal = False \"\"\"", "filenames for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a template for the", "def write_stats(self, data): \"\"\"Writes statistics of a single data object\"\"\" with open(self.filename, 'r+')", "filename -- file name to save export file as total_files -- total number", "and creates a file containing analysis of all files processed removefile -- Removes", "files = [] templates = [] if len(sys.argv) > 1: terminal = True", "[column.header for column in data.columns if column.empty] fp.write(\"Number of Empty Columns: \" +", "of html, also generates html report for debugging purposes print(\"[Step 7/7] Report Successfully", "box given a list of files maketemplate -- Links to Create template web", "num_templates = len(templates) print(num_templates) num_files = len(filenames) if num_templates == num_files: for i", "sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', 
newline='') as", "not None: self.template.append(template.name) if hasattr(self, 'templateLabel'): self.templateLabel.destroy() self.templateLabel = Label(self.display, text=str(\"Template Selected: \"", "dataaskopenfolder(self): \"\"\"Asks for folder to process and displays the contained files in the", "object write_summary -- writes summary of all files to be run after processing", "for \" + filename) def get_file_dir(location): \"\"\"Returns the directory of the file with", "DisplayWindow: \"\"\"GUI for application allowing users to interact with program in simpler and", "return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process files and templates and runs", "files to process and displays them in the output window\"\"\" self.reset() if self.template:", "them in the output window dataaskopenfolder -- Asks for folder to process and", "threading import Thread try: from .data import * from .report import * from", "Invalid Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \" + str(self.total_empty)", "0 self.total_col = 0 if not offline: with open(self.filename, 'w') as fp: pass", "providing necessary information for them to run analysis and create desired outputs (i.e.", "program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported", "Templates can be used to describe one or more csv files. If using", "relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks for files to process", "print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if window is", "data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window is not None: window.step_progress() data.find_errors() print(\"[Step", "arguments before proceeding. 
\"\"\" files = [] templates = [] if len(sys.argv) >", "self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns: \" + str(len(data.columns)) + '\\n') self.total_col", "\",filename) print(\"[Step 2/7] Reading data\") if window is not None: window.step_progress() window.setstatus(\"Processing \"", "of errors throughout files \"\"\" def __init__(self, filename, offline=True): self.filename = filename self.total_files", "data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if window is not None: window.step_progress() data.pre_analysis()", "summary of a single data object write_summary -- writes summary of all files", "\" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \" + str(self.total_files) + \"\\n\")", "+ \".csv\") try: with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for", "if folder != '': self.datafiles = [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display,", "\" + filename) def get_file_dir(location): \"\"\"Returns the directory of the file with the", "before proceeding. \"\"\" files = [] templates = [] if len(sys.argv) > 1:", "fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty Columns: \" +", "objects, providing necessary information for them to run analysis and create desired outputs", "be run after processing all files Variables: filename -- file name to save", "files and templates and runs the program over them. 
Converts excel files and", "results to file\"\"\" self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile = '' try: exportfile", "the start templateaskopenfile -- Asks for a template to use during processing and", "self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W)", "GUI progress -- Progress bar showing progress through program \"\"\" class DisplayWindow: \"\"\"GUI", "* 5.0 + 0.01) if templates != None or templates: if len(templates) ==", "label)) else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to create", "i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep functionality", "Keyword arguments: files -- files to be processed templates -- files to use", "interact with program in simpler and more explanatory way Methods: dataaskopenfile -- Asks", "padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\",", "website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates report at the end\"\"\" self.progress[\"value\"]", "of Empty Columns: \" + str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of", "self.datafiles = [file.name for file in self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles)", ".data import * from .report import * from .template_reader import * except: from", "self.setstatus(\"ERROR: Permission Denied, ensure export file is not open in another program\") def", "= Label(self.display, text=str(\"Template Selected: \" + 
self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\")", "\" + file) if window is not None: window.setstatus(\"WARNING: Unsupported file type \"", "[] self.template = None # Main Window mainwindow = Frame(root) self.display = Frame(mainwindow)", "Can process one or more csv files. Can specify template to describe data", "Methods: write_stats -- writes summary of a single data object write_summary -- writes", "Converts excel files and applies template to each file Keyword arguments: files --", "terminal -- boolean value whether program is running through terminal or through GUI", "from threading import Thread try: from .data import * from .report import *", "total_errors -- total numher of errors throughout files \"\"\" def __init__(self, filename, offline=True):", "-- Arguments provided to the program at runtime. exporter -- Exporter object if", "2/7] Reading data\") if window is not None: window.step_progress() window.setstatus(\"Processing \" + filename", "html report for debugging purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis for:", "kwargs.pop('exporter', None) window = kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7] Processing file:", "root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template = None # Main", "== '__main__': \"\"\"If the program is run with application.py as the argument to", "sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet", "= os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. 
---------------------------------- Can process one", "self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook',", "box given a list of files\"\"\" remove_file = lambda x, m: (lambda p:", "file) if exportfile != '': export = Exporter(exportfile) else: export = None if", "\"\\n\\n\") with open(self.filename, 'r') as fd: for line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file,", "as fp: wr = csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else:", "html, also generates html report for debugging purposes print(\"[Step 7/7] Report Successfully Generated\")", "using multiple templates for multiple files list templates in the same order as", "'': self.datafiles = [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \"", "is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7]", "files to process and displays them in the output window dataaskopenfolder -- Asks", "processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file, 'w') as fp: fp.write(\"Error Report \"", "> 1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes", "debugging purposes print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis for: \",filename) if window", "Arguments: args -- Arguments provided to the program at runtime. 
exporter -- Exporter", "write_summary(self): \"\"\"Writes summary of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with open( temp_file,", "File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for template to use in processing\"\"\"", "the file with the file name Keyword arguments: location -- A file path.", "'\\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \"", "filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile':", "'' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv', filetypes=[('Csv Files', '*.csv'), ('All Files', '.*')]) exportfile.close()", "window is not None: window.setstatus(\"Completed Analysis for \" + filename) def get_file_dir(location): \"\"\"Returns", "1 fp.write(\"Number of Invalid rows: \" + str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows)", "Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel", "os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp) for", "Asks for files to process and displays them in the output window dataaskopenfolder", "files not processed fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \"", "self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4,", "maketemplate -- Links to Create template web page of Data-oracle website process_report 
--", "excel = [] for file in files: name_ext = os.path.splitext(file) # TODO handle", "os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. ---------------------------------- Can process one or", "fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all files processed\"\"\" temp_file = os.path.join(os.path.split(self.filename)[0],\"Tempfile\") with", "datafiles to be processed display -- output window Frame object template -- template", "Author: <NAME> Last Updated: 28/02/2017 \"\"\" import argparse import webbrowser import textwrap import", "-- Runs program and generates report for all files processed process_export -- Runs", "+ str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\") with open(self.filename,", "after processing all files Variables: filename -- file name to save export file", "Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4,", "mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for", "in process if applicable \"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles", "of files and templates\") else: for name in filenames: main(name, exporter=export, window=window) if", "+ str(len(data.errors)) + '\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns: \" +", "None: window.step_progress() print(\"Completed analysis for: \", filename) if window is not None: window.setstatus(\"Completed", "summary of all files to be run after processing all files Variables: filename", 
"more csv files. If using multiple templates for multiple files list templates in", "-- writes summary of all files to be run after processing all files", "kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs if export file is open", "number of files and templates\") else: for name in filenames: main(name, exporter=export, window=window)", "text=str(\"Template Selected: \" + self.template[0]), anchor='w') self.templateLabel.pack(fill=X) self.setstatus(\"Ready to Process Folder...\") return self.template", "self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\")", "templates[i], exporter=export, window=window) else: # TODO keep functionality when excel files have multiple", "files run in program Methods: write_stats -- writes summary of a single data", "exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if window is", "(i.e. HTML report or writing to exported file). Keyword Arguments: args -- Arguments", "str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \" + str(self.total_invalid) + \"\\n\") fp.write(\"Total Empty", "in files: name_ext = os.path.splitext(file) # TODO handle empty sheets if name_ext[1] ==", "correspond to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more filenames for the processor to", "import textwrap import xlrd from tkinter import * from tkinter import filedialog, ttk", "describe one or more csv files. 
If using multiple templates for multiple files", "filedialog.askdirectory() if folder != '': self.datafiles = [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file))", "\"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as fd:", "None: self.datafiles = [file.name for file in self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X)", "files they correspond to. ''')) parser.add_argument('filenames', nargs='+',\\ help='one or more filenames for the", "given a list of files\"\"\" remove_file = lambda x, m: (lambda p: self.removefile(x,", "= Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s) to process: \").grid(row=0,", "in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet +", "def setmaxprogress(self, max): self.progress[\"maximum\"] = max def step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg)", "<NAME> Last Updated: 28/02/2017 \"\"\" import argparse import webbrowser import textwrap import xlrd", "same order as the files they correspond to. 
''')) parser.add_argument('filenames', nargs='+',\\ help='one or", "template to use during processing and displays it in the output window Variables:", "+ '\\n') self.total_errors = len(data.errors) fp.write(\"Number of Valid Columns: \" + str(len(data.columns)) +", "os.remove(file) if __name__ == '__main__': \"\"\"If the program is run with application.py as", "window is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates != None", "and generates report for all files processed process_export -- Runs program and creates", "export.write_summary() if excel: for file in excel: os.remove(file) if __name__ == '__main__': \"\"\"If", "self.progress[\"value\"] = 0 self.setstatus(\"Processing Files...\") exportfile = '' try: exportfile = filedialog.asksaveasfile(mode='w', defaultextension='*.csv',", "+ '\\n') self.total_col = str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter:", "== ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\") def", "is not None: self.datafiles = [file.name for file in self.datafiles] Label(self.display, text=\"Selected Files:", "def dataaskopenfolder(self): \"\"\"Asks for folder to process and displays the contained files in", "filename self.total_files = 0 self.total_invalid = 0 self.total_empty = 0 self.total_errors = 0", "Running Analysis\") if window is not None: window.step_progress() window.setstatus(\"Running Analysis on \" +", "Unable to read file, no readable data detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create", "filename) if window is not None: window.setstatus(\"Completed Analysis for \" + filename) def", "sheet + \".csv\") try: with open(new_name, 'w', newline='') as fp: wr = csv.writer(fp)", "window.setstatus(\"Running Analysis on \" + filename + \"...\") data.analysis() if exporter is None:", "from tkinter import * from tkinter import filedialog, ttk from threading import 
Thread", "write_error(self, data): \"\"\"Writes error message for files not processed fully\"\"\" with open(self.filename, 'r+')", "wr.writerow(sh.row_values(rownum)) filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name =", "exporter is None: print(\"[Step 6/7] Generating report\") report = Report(data) str_report = report.html_report()", "label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse", "= Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self): \"\"\" Asks", "Label(self.display, text=str(\"Template Selected: \" + self.template[0]), anchor='w').pack(fill=X) self.datafiles = filedialog.askopenfiles(mode='r', filetypes=[('All Files', '.*'),('Csv", "= ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window", "template to use in process if applicable \"\"\" def __init__(self): root = Tk()", "for: \", filename) if window is not None: window.setstatus(\"Completed Analysis for \" +", "Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self):", "ensure \" + new_name + \" is not open in another program\") return", "None) window = kwargs.pop('window', None) filename = args[0] print(\"[Step 1/7] Processing file: \",filename)", "Generated\") print(\"Completed analysis for: \",filename) if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else:", "= Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) 
Button(mainwindow, text=\"Browse Files...\", command=", "processing and displays it in the output window Variables: datafiles -- list of", "template_reader import * terminal = False \"\"\" Global Variables: terminal -- boolean value", "num_templates == num_files: for i in range(0, num_files): main(filenames[i], templates[i], exporter=export, window=window) else:", "'.*')]) exportfile.close() Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'exportfile': exportfile.name, 'window': self}).start() except PermissionError: # Occurs", "None if window is not None: window.setmaxprogress(len(filenames) * 5.0 + 0.01) if templates", "Variables: datafiles -- list of datafiles to be processed display -- output window", "be processed display -- output window Frame object template -- template to use", "processed removefile -- Removes file from being processed after being selected in output", "= len(empty_columns) fp.write(\"Number of Error Cells: \" + str(len(data.errors)) + '\\n') self.total_errors =", "'.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is", "not None: window.step_progress() window.setstatus(\"Processing \" + filename + \"...\") if len(args) > 1:", "webbrowser import textwrap import xlrd from tkinter import * from tkinter import filedialog,", "[] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv Files', '*.csv')], defaultextension=\"*.csv\") if template", "applicable \"\"\" filenames = [] excel = [] for file in files: name_ext", "as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \"", "Thread try: from .data import * from .report import * from .template_reader import", "to exported file). 
Keyword Arguments: args -- Arguments provided to the program at", "print(\"ERROR: Unsupported file type: \" + file) if window is not None: window.setstatus(\"WARNING:", "Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks", "runtime. exporter -- Exporter object if applicable \"\"\" exporter = kwargs.pop('exporter', None) window", "analysis to if applicable \"\"\" filenames = [] excel = [] for file", "to if applicable \"\"\" filenames = [] excel = [] for file in", "This will process all the command line arguments before proceeding. \"\"\" files =", "run in program Methods: write_stats -- writes summary of a single data object", "processed fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1]", "* from template_reader import * terminal = False \"\"\" Global Variables: terminal --", "None: print(\"[Step 6/7] Generating report\") report = Report(data) str_report = report.html_report() html =", "file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate)", "report.gen_html(str_report) # returns string of html, also generates html report for debugging purposes", "# Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText,", "if __name__ == '__main__': \"\"\"If the program is run with application.py as the", "column.empty] fp.write(\"Number of Empty Columns: \" + str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns)", "line execution begins here. 
This will process all the command line arguments before", "\"...\") if len(args) > 1: temp = Template(args[1]) data = Data(filename, temp) else:", "the output window filetext -- Fills output box given a list of files", "label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow =", "of a single data object write_summary -- writes summary of all files to", "wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0] + \"_\" + sheet + \".csv\") try: with open(new_name,", "= csv.writer(fp) for rownum in range(sh.nrows): wr.writerow(sh.row_values(rownum)) except PermissionError: # If created csv", "-- Fills output box given a list of files maketemplate -- Links to", "def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a file containing analysis", "for name in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates)", "templates in the same order as the files they correspond to. 
''')) parser.add_argument('filenames',", "+ \"\\n\") fp.write(\"Total Errors: \" + str(self.total_errors) + \"\\n\\n\") with open(self.filename, 'r') as", "file in files: label = Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name ==", "templates != None or templates: if len(templates) == 1: for name in filenames:", "template -- template to use in process if applicable \"\"\" def __init__(self): root", "webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self): \"\"\"Runs program and generates report at the end\"\"\" self.progress[\"value\"] =", "Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides text", "column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew')", "all files to be run after processing all files Variables: filename -- file", "report.html_report() html = report.gen_html(str_report) # returns string of html, also generates html report", "\"\\n\") fp.write(\"Total Valid Columns: \" + str(self.total_col) + \"\\n\") fp.write(\"Total Errors: \" +", "fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') self.total_files += 1 fp.write(\"Number of", "location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process files and templates and runs the", "self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\")", "None: export.write_summary() if excel: for file in excel: os.remove(file) if __name__ == '__main__':", "use in processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All Files', '.*'), ('Csv", "def step_progress(self): self.progress.step() def 
setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a", "the output window\"\"\" self.reset() if self.template is not None: Label(self.display, text=str(\"Template Selected: \"", "Files...\", command= self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5)", "import * from tkinter import filedialog, ttk from threading import Thread try: from", "after being selected in output window reset -- Resets the program removing all", "and runs the program over them. Converts excel files and applies template to", "self.datafiles = [] for file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" +", "in the same order as the files they correspond to. ''')) parser.add_argument('filenames', nargs='+',\\", "\"\"\"Provides text for output box given a list of files\"\"\" remove_file = lambda", "1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with open(new_name, 'w', newline='')", "\"\"\"Asks for template to use in processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r',", "None) filename = args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7] Reading data\")", "and displays the contained files in the output window\"\"\" self.reset() if self.template is", "folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files): \"\"\"Provides text for output box", "template web page of Data-oracle website process_report -- Runs program and generates report", "file type \" + file) if exportfile != '': export = Exporter(exportfile) else:", "and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\"", "open self.setstatus(\"ERROR: Permission Denied, ensure 
export file is not open in another program\")", "label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow, text=\"Browse Files...\",", "export != None: export.write_summary() if excel: for file in excel: os.remove(file) if __name__", "'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable", "= True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv files. ----------------------------------", "mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3, rowspan=7,", "the command line execution begins here. This will process all the command line", "+ str(len(data.invalid_rows)) + '\\n') self.total_invalid += len(data.invalid_rows) empty_columns = [column.header for column in", "is not None: window.step_progress() print(\"Completed analysis for: \", filename) if window is not", "to be run after processing all files Variables: filename -- file name to", "filenames = [] excel = [] for file in files: name_ext = os.path.splitext(file)", "= str(len(data.columns)) if data.delimiter_type == ',': fp.write(\"Delimiter: comma\\n\") else: fp.write(\"Delimiter: \" + data.delimiter_type", "self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder != '': self.datafiles = [] for", "padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5)", "Denied, ensure \" + new_name + \" is not open in another program\")", "-- Removes file from being processed after being selected in output window reset", "output 
window Frame object template -- template to use in process if applicable", "and is open window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name + \" is", "if not data.raw_data: print(\"ERROR: Unable to read file: \" + filename) window.setstatus(\"ERROR: Unable", "report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if window is not None: window.step_progress()", "of Data-oracle website process_report -- Runs program and generates report for all files", "step_progress(self): self.progress.step() def setstatus(self, msg): self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a file", "the output window Variables: datafiles -- list of datafiles to be processed display", "self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0", "= [] templates = [] if len(sys.argv) > 1: terminal = True pathname", "Running pre-analysis\") if window is not None: window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\")", "not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step 3/7] Running pre-analysis\") if", "'.xls')], defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles = [file.name for file in", "for template to use in processing\"\"\" self.template = [] template = filedialog.askopenfile(mode='r', filetypes=[('All", "column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow,", "open in another program\") def removefile(self, file, label): \"\"\"Removes file from process list", "from process list and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self):", "html = 
report.gen_html(str_report) # returns string of html, also generates html report for", "filename) if exporter is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean() print(\"[Step", "sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create Template\", fg=\"blue\") label3.bind(\"<Button-1>\", self.maketemplate) label3.grid(row=2) Button(mainwindow,", "to Process Files...\") return self.datafiles def dataaskopenfolder(self): \"\"\"Asks for folder to process and", "+ str(len(empty_columns)) + '\\n') self.total_empty = len(empty_columns) fp.write(\"Number of Error Cells: \" +", "= Data(filename, temp) else: data = Data(filename) if not data.raw_data: print(\"ERROR: Unable to", "the file name Keyword arguments: location -- A file path. \"\"\" return location.rpartition('\\\\')", "Data-oracle\") self.datafiles = [] self.template = None # Main Window mainwindow = Frame(root)", "Keyword arguments: location -- A file path. \"\"\" return location.rpartition('\\\\') def process_files(files, templates,", "another program\") return None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR:", "process_export -- Runs program and creates a file containing analysis of all files", "# Main Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or", "filename + \"...\") if len(args) > 1: temp = Template(args[1]) data = Data(filename,", "from .data import * from .report import * from .template_reader import * except:", "of invalid rows total_empty -- total number of empty columns total_errors -- total", "self.dataaskopenfile).grid(row=0, column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse", "'.xlsx': print(\"[Step 0/7] Converting to csv file\") wb = xlrd.open_workbook(file) sheet_names = 
wb.sheet_names()", "is open window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name + \" is not", "def maketemplate(self, event): \"\"\"Opens webbrowser to create template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\")", "7/7] Report Successfully Generated\") if window is not None: window.step_progress() print(\"Completed analysis for:", "window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\")", "terminal or through GUI progress -- Progress bar showing progress through program \"\"\"", "file\") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh =", "is not None: window.step_progress() window.setstatus(\"Processing \" + filename + \"...\") if len(args) >", "argparse import webbrowser import textwrap import xlrd from tkinter import * from tkinter", "of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read file, no readable", "\", filename) if window is not None: window.setstatus(\"Completed Analysis for \" + filename)", "execution begins here. 
This will process all the command line arguments before proceeding.", "\" + new_name + \" is not open in another program\") return None", "program and creates a file containing analysis of all files processed removefile --", "def get_file_dir(location): \"\"\"Returns the directory of the file with the file name Keyword", "open window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name + \" is not open", "open( temp_file, 'w') as fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total", "defaultextension=\"*.csv\") if self.datafiles is not None: self.datafiles = [file.name for file in self.datafiles]", "class that creates files instead of generating HTML Reports Author: <NAME> Last Updated:", "Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def filetext(self, files):", "= self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\")", "Label(self.display, text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder !=", "to csv file\") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1:", "line in fd: fp.write(line) os.remove(self.filename) os.rename(temp_file, self.filename) def write_error(self, data): \"\"\"Writes error message", "if len(templates) == 1: for name in filenames: main(name, templates[0], exporter=export, window=window) else:", "else: fp.write(\"Delimiter: \" + data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of", "Label(mainwindow, text=\"Select template file(optional): \").grid(row=1, sticky=E, pady=10) label3 = Label(mainwindow, text=\"> Create Template\",", "== 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else: 
label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event):", "filetext -- Fills output box given a list of files maketemplate -- Links", "process list and removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets", "!= None or templates: if len(templates) == 1: for name in filenames: main(name,", "one or more csv files. If using multiple templates for multiple files list", "files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0, column=3,", "'.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to csv file\") wb =", "name_ext = os.path.splitext(file) # TODO handle empty sheets if name_ext[1] == '.xls' or", "application.py as the argument to the command line execution begins here. This will", "webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\") exporter.write_stats(data) print(\"[Step 7/7] Report Successfully Generated\") if", "-- writes summary of a single data object write_summary -- writes summary of", "('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if self.datafiles is not None:", "window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name + \" is not open in", "in self.datafiles] Label(self.display, text=\"Selected Files: \", anchor='w').pack(fill=X) self.filetext(self.datafiles) self.statusText.set(\"Ready to Process Files...\") return", "name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to csv file\")", "text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6,", "export = Exporter(exportfile) else: 
export = None if window is not None: window.setmaxprogress(len(filenames)", "empty columns total_errors -- total numher of errors throughout files \"\"\" def __init__(self,", "+ \"...\") if len(args) > 1: temp = Template(args[1]) data = Data(filename, temp)", "orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output Window self.display.grid(row=0, column=3,", "window=window) if export != None: export.write_summary() if excel: for file in excel: os.remove(file)", "fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \" + str(self.total_files)", "filetypes=[('All Files', '.*'),('Csv Files','*.csv'), ('Excel Workbook', '*.xlsx'), ('Excel 97-2003 Workbook', '.xls')], defaultextension=\"*.csv\") if", "over them. Converts excel files and applies template to each file Keyword arguments:", "Output Window self.display.grid(row=0, column=3, rowspan=7, sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting", "writes summary of a single data object write_summary -- writes summary of all", "window.step_progress() data.pre_analysis() print(\"[Step 4/7] Finding Errors\") if window is not None: window.step_progress() data.find_errors()", "-- A file path. 
\"\"\" return location.rpartition('\\\\') def process_files(files, templates, exportfile='', window=None): \"\"\"Process", "print(\"[Step 4/7] Finding Errors\") if window is not None: window.step_progress() data.find_errors() print(\"[Step 5/7]", "read file: \" + filename) if exporter is not None: exporter.write_error(data) return None", "Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow,", "def process_files(files, templates, exportfile='', window=None): \"\"\"Process files and templates and runs the program", "print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow = self.display.winfo_parent()", "+ 0.01) if templates != None or templates: if len(templates) == 1: for", "Processes Csv files. ---------------------------------- Can process one or more csv files. Can specify", "= len(filenames) if num_templates == num_files: for i in range(0, num_files): main(filenames[i], templates[i],", "\" + data.delimiter_type + '\\n') fp.write(\"\\n\") def write_summary(self): \"\"\"Writes summary of all files", "sheets if name_ext[1] == '.xls' or name_ext[1] == '.xlsx': print(\"[Step 0/7] Converting to", "removes label\"\"\" print(\"Removing: \", file) self.datafiles.remove(file) label.destroy() def reset(self): \"\"\"Resets all files\"\"\" mainwindow", "process all the command line arguments before proceeding. 
\"\"\" files = [] templates", "displays them in the output window dataaskopenfolder -- Asks for folder to process", "self.template = None # Main Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow,", "data detected.\\n\\n\") def main(*args, **kwargs): \"\"\" Create Data and Report objects, providing necessary", "self.setstatus(\"Processing Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports results", "templates and runs the program over them. Converts excel files and applies template", "\"\"\" def __init__(self): root = Tk() root.wm_title(\"UWA Data-oracle\") self.datafiles = [] self.template =", "templates in processing exportfile -- file to export analysis to if applicable \"\"\"", "to be processed display -- output window Frame object template -- template to", "get_file_dir(location): \"\"\"Returns the directory of the file with the file name Keyword arguments:", "os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return folder def", "= [column.header for column in data.columns if column.empty] fp.write(\"Number of Empty Columns: \"", "len(templates) == 1: for name in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates", "fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read file, no", "= 0 def templateaskopenfile(self): \"\"\"Asks for template to use in processing\"\"\" self.template =", "when excel files have multiple sheets print(\"Error, different number of files and templates\")", "csv file already exists and is open window.setstatus(\"ERROR: Permission Denied, ensure \" +", "from .template_reader import * except: from data import * from report import *", "exporter=export, window=window) else: 
num_templates = len(templates) print(num_templates) num_files = len(filenames) if num_templates ==", "files instead of generating HTML Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\" import", "used to describe one or more csv files. If using multiple templates for", "file in os.listdir(folder): self.datafiles.append(os.path.join(folder,file)) Label(self.display, text=str(\"Selected Folder: \" + folder), anchor='w').pack(fill=X) self.filetext(self.datafiles) return", "and applies template to each file Keyword arguments: files -- files to be", "Main Window mainwindow = Frame(root) self.display = Frame(mainwindow) Label(mainwindow, text=\"Select File(s) or Folder(s)", "data): \"\"\"Writes statistics of a single data object\"\"\" with open(self.filename, 'r+') as fp:", "help='one or more filenames for the processor to analyse') parser.add_argument('-t', nargs='+', metavar='template', help='a", "+ \"\\n\\n\") fp.write(\"Total Files Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows:", "+ filename) if exporter is not None: exporter.write_error(data) return None data.remove_invalid() data.create_columns() data.clean()", "None filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type:", "writing to exported file). 
Keyword Arguments: args -- Arguments provided to the program", "import * from .template_reader import * except: from data import * from report", "self.progress = ttk.Progressbar(mainwindow, orient=\"horizontal\", mode=\"determinate\") self.progress.grid(row=5, columnspan=3, sticky='ew', padx=10, pady=5) mainwindow.pack() # Output", "data object\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1]", "file type: \" + file) if window is not None: window.setstatus(\"WARNING: Unsupported file", "\"\"\"Class that creates a file containing analysis of all files run in program", "if exporter is None: print(\"[Step 6/7] Generating report\") report = Report(data) str_report =", "command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\", command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\",", "total_empty -- total number of empty columns total_errors -- total numher of errors", "Files...\") Thread(target=process_files, args=(self.datafiles, self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports results to", "-- template to use in process if applicable \"\"\" def __init__(self): root =", "main(filenames[i], templates[i], exporter=export, window=window) else: # TODO keep functionality when excel files have", "templates = [] if len(sys.argv) > 1: terminal = True pathname = os.path.dirname(sys.argv[0])", "self.template), kwargs={'window':self}).start() def process_export(self): \"\"\"Runs program and exports results to file\"\"\" self.progress[\"value\"] =", "1: for name in filenames: main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates)", "5/7] Running Analysis\") if window is not None: window.step_progress() window.setstatus(\"Running Analysis on \"", "for column in data.columns if column.empty] fp.write(\"Number of Empty 
Columns: \" + str(len(empty_columns))", "Reports Author: <NAME> Last Updated: 28/02/2017 \"\"\" import argparse import webbrowser import textwrap", "event): \"\"\"Opens webbrowser to create template page on Data-oracle website\"\"\" webbrowser.open_new(\"http://www.data-oracle.com/upload/createTemplate/\") def process_report(self):", "filenames.append(new_name) excel.append(new_name) elif name_ext[1] == '.csv': filenames.append(file) else: print(\"ERROR: Unsupported file type: \"", "fp: fp.write(\"Error Report \" + os.path.split(self.filename)[1] + \"\\n\\n\") fp.write(\"Total Files Analysed: \" +", "interface and exporting class that creates files instead of generating HTML Reports Author:", "Button(mainwindow, text=\"Reset\", command=self.reset).grid(row=6, column=1, sticky='ew') Button(mainwindow, text=\"Exit\", command=mainwindow.quit).grid(row=6, column=2, sticky='ew', pady=5) self.progress =", "= Frame(mainwindow) self.display.grid(row=0, column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0 def", "selected in output window reset -- Resets the program removing all files from", "* from .report import * from .template_reader import * except: from data import", "type \" + file) if exportfile != '': export = Exporter(exportfile) else: export", "self.statusText.set(\"Waiting for File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop()", "program in simpler and more explanatory way Methods: dataaskopenfile -- Asks for files", "column=3, rowspan=7, sticky=N) self.setstatus(\"Waiting for File...\") self.progress[\"value\"] = 0 def templateaskopenfile(self): \"\"\"Asks for", "in the output window\"\"\" self.reset() if self.template is not None: Label(self.display, text=str(\"Template Selected:", "files. ---------------------------------- Can process one or more csv files. 
Can specify template to", "filenames.append(new_name) excel.append(new_name) else: for sheet in sheet_names: sh = wb.sheet_by_name(sheet) new_name = os.path.join(os.path.splitext(file)[0]", "File...\") status = Label(root, textvariable=self.statusText, bd=1, relief=SUNKEN, anchor=W) status.pack(side=BOTTOM, fill=X) root.mainloop() def dataaskopenfile(self):", "-- file name to save export file as total_files -- total number of", "main(name, templates[0], exporter=export, window=window) else: num_templates = len(templates) print(num_templates) num_files = len(filenames) if", "= args[0] print(\"[Step 1/7] Processing file: \",filename) print(\"[Step 2/7] Reading data\") if window", "allowing users to interact with program in simpler and more explanatory way Methods:", "print(\"[Step 7/7] Report Successfully Generated\") print(\"Completed analysis for: \",filename) if window is not", "Asks for folder to process and displays the contained files in the output", "column=1, padx=5, sticky='ew') Button(mainwindow, text='Browse Folders...', command= self.dataaskopenfolder).grid(row=0, column=2, padx=5) Button(mainwindow, text=\"Browse Templates...\",", "def dataaskopenfile(self): \"\"\" Asks for files to process and displays them in the", "from the process queue and sets progress bar back to the start templateaskopenfile", "1: terminal = True pathname = os.path.dirname(sys.argv[0]) parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,\\ description=textwrap.dedent('''\\ Processes Csv", "exists and is open window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name + \"", "program is run with application.py as the argument to the command line execution", "not open in another program\") def removefile(self, file, label): \"\"\"Removes file from process", "read file: \" + filename) window.setstatus(\"ERROR: Unable to read file: \" + filename)", "with program in simpler and more explanatory way Methods: dataaskopenfile -- Asks for", 
"templateaskopenfile(self): \"\"\"Asks for template to use in processing\"\"\" self.template = [] template =", "all files\"\"\" mainwindow = self.display.winfo_parent() mainwindow = self.display._nametowidget(mainwindow) self.display.destroy() self.display = Frame(mainwindow) self.display.grid(row=0,", "\"\"\"GUI for application allowing users to interact with program in simpler and more", "files processed process_export -- Runs program and creates a file containing analysis of", "fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') fp.write(\"ERROR: Unable to read file,", "-- Links to Create template web page of Data-oracle website process_report -- Runs", "None: Label(self.display, text=str(\"Template Selected: \" + self.template.name), anchor='w').pack(fill=X) folder = filedialog.askdirectory() if folder", "and Report objects, providing necessary information for them to run analysis and create", "that creates a file containing analysis of all files run in program Methods:", "wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0])", "will process all the command line arguments before proceeding. 
\"\"\" files = []", "is not open in another program\") def removefile(self, file, label): \"\"\"Removes file from", "files to use as templates in processing exportfile -- file to export analysis", "files and applies template to each file Keyword arguments: files -- files to", "through terminal or through GUI progress -- Progress bar showing progress through program", "Denied, ensure export file is not open in another program\") def removefile(self, file,", "for all files processed process_export -- Runs program and creates a file containing", "rowspan=7, sticky=N) # Status Bar self.statusText = StringVar() self.statusText.set(\"Waiting for File...\") status =", "print(\"[Step 0/7] Converting to csv file\") wb = xlrd.open_workbook(file) sheet_names = wb.sheet_names() if", "for files not processed fully\"\"\" with open(self.filename, 'r+') as fp: fp.seek(0,2) fp.write(\"Analysis of", "Unsupported file type \" + file) if exportfile != '': export = Exporter(exportfile)", "\",filename) if window is not None: window.step_progress() webbrowser.open(\"file://\"+html,new=2) else: print(\"[Step 6/7] Generating report\")", "filename, offline=True): self.filename = filename self.total_files = 0 self.total_invalid = 0 self.total_empty =", "try: from .data import * from .report import * from .template_reader import *", "HTML report or writing to exported file). 
Keyword Arguments: args -- Arguments provided", "\"\\n\\n\") fp.write(\"Total Files Analysed: \" + str(self.total_files) + \"\\n\") fp.write(\"Total Invalid Rows: \"", "command=self.templateaskopenfile).grid(row=1, column=1, padx=5) Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2,", "len(empty_columns) fp.write(\"Number of Error Cells: \" + str(len(data.errors)) + '\\n') self.total_errors = len(data.errors)", "Label(self.display, text=str(\"\\t\" + file), anchor='w') if os.name == 'posix': label.bind(\"<Button-2>\", remove_file(file, label)) else:", "def filetext(self, files): \"\"\"Provides text for output box given a list of files\"\"\"", "Button(mainwindow, text=\"View Report\", command=self.process_report).grid(row=4, column=1,sticky='ew', padx=5) Button(mainwindow, text=\"Export\", command=self.process_export).grid(row=4, column=2, sticky='ew') Button(mainwindow, text=\"Reset\",", "fp.write(\"ERROR: Unable to read file, no readable data detected.\\n\\n\") def main(*args, **kwargs): \"\"\"", "if len(sheet_names) == 1: sh = wb.sheet_by_name(sheet_names[0]) new_name = os.path.splitext(file)[0] + \".csv\" with", "if export file is open self.setstatus(\"ERROR: Permission Denied, ensure export file is not", "files -- files to be processed templates -- files to use as templates", "\" + filename + \"...\") if len(args) > 1: temp = Template(args[1]) data", "tkinter import * from tkinter import filedialog, ttk from threading import Thread try:", "files maketemplate -- Links to Create template web page of Data-oracle website process_report", "already exists and is open window.setstatus(\"ERROR: Permission Denied, ensure \" + new_name +", "\"\"\"Main execution body for program. 
Contains GUI interface and exporting class that creates", "self.statusText.set(msg) class Exporter(object): \"\"\"Class that creates a file containing analysis of all files", "excel files and applies template to each file Keyword arguments: files -- files", "as fp: fp.seek(0,2) fp.write(\"Analysis of \" + os.path.split(data.filename)[1] + '\\n') self.total_files += 1", "else: label.bind(\"<Button-3>\", remove_file(file, label)) label.pack(fill=X) def maketemplate(self, event): \"\"\"Opens webbrowser to create template", "generates report for all files processed process_export -- Runs program and creates a", "process_files(files, templates, exportfile='', window=None): \"\"\"Process files and templates and runs the program over" ]
[ "TestCase, override_settings from social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def", "session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response = self.client.get(reverse('social:begin', kwargs={'backend': 'github'})) self.assertEqual(response.status_code, 302)", "= '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"] = \"1\"", "import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session = self.client.session", "setUp(self): session = self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response = self.client.get(reverse('social:begin',", "SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"] = \"1\" session.save() def", "from django.test import TestCase, override_settings from social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2')", "@override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"] =", "from social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session", "AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response", "social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session =", "'1', SOCIAL_AUTH_GITHUB_SECRET='2') class 
AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"] = \"1\" session.save()", "session = self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response = self.client.get(reverse('social:begin', kwargs={'backend':", "def setUp(self): session = self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response =", "= self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response = self.client.get(reverse('social:begin', kwargs={'backend': 'github'}))", "reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"]", "class AuthTestcase(TestCase): def setUp(self): session = self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self):", "self.client.session session[\"github_status\"] = \"1\" session.save() def test_begin_view(self): response = self.client.get(reverse('social:begin', kwargs={'backend': 'github'})) self.assertEqual(response.status_code,", "override_settings from social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase): def setUp(self):", "django.test import TestCase, override_settings from social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class", "import TestCase, override_settings from social_django.compat import reverse @override_settings(SOCIAL_AUTH_GITHUB_KEY = '1', SOCIAL_AUTH_GITHUB_SECRET='2') class AuthTestcase(TestCase):" ]
[ "user-defined repetition interval. \"\"\" counter = 0 listen_interval = self.repetition_interval/10 previous_decision = self.decision", "def __init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id: str): self.event_host = os.getenv(\"BRISE_EVENT_SERVICE_HOST\") self.event_port =", "self.logger.warning(f\"No Experiment state is yet available for the experiment {self.experiment_id}\") if numb_of_measured_configurations >", "def update_expression(self, stop_condition_type: str, decision: bool) -> None: \"\"\" This function sends event", "{\"experiment_id\": self.experiment_id, \"stop_condition_type\": stop_condition_type, \"decision\": decision } body = json.dumps(dictionary_dump) with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host,", "body = json.dumps(dictionary_dump) with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel:", "self.consume_channel.queue_declare(queue='', exclusive=True) self.termination_queue_name = self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads)", "function sends stop_experiment message to main node. It could be triggered only if", "StopConditionTriggerLogic expression, since this particular Stop Condition was triggered. :param stop_condition_type: Stop Condition", "with defined in Experiment Description period. When the Stop Condition is triggered to", "ABC, abstractmethod import pika from tools.mongo_dao import MongoDB class StopCondition(ABC): def __init__(self, stop_condition_parameters:", "shutting down Stop Condition (in case of BRISE Experiment termination). 
\"\"\" def __init__(self,", "for initializing consumer thread :param stop_condition: an instance of Stop Condition object \"\"\"", "this method will be called in `self_evaluation` method with defined in Experiment Description", "be called in `self_evaluation` method with defined in Experiment Description period. When the", "time.sleep(listen_interval) counter = counter + 1 if counter % 10 == 0: counter", "result \"\"\" try: while not self._is_interrupted: self.consume_channel.connection.process_data_events(time_limit=1) # 1 second finally: if self.connection.is_open:", "method with defined in Experiment Description period. When the Stop Condition is triggered", ":param decision: Stop Condition decision (boolean) \"\"\" dictionary_dump = {\"experiment_id\": self.experiment_id, \"stop_condition_type\": stop_condition_type,", "counter = 0 numb_of_measured_configurations = 0 try: numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except", "channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue', body=body) def self_evaluation(self): \"\"\" This function performs self-evaluation of Stop", "'stop' if self.decision else 'continue'} running Experiment.\" self.logger.info(msg) previous_decision = self.decision self.update_expression(self.stop_condition_type, self.decision)", "function sends event to Stop Condition Validator with command to check StopConditionTriggerLogic expression,", "numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError: self.logger.warning(f\"No Experiment state is yet available", "StopCondition): \"\"\" The function for initializing consumer thread :param stop_condition: an instance of", "pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, 
port=self.stop_condition.event_port)) self.consume_channel = self.connection.channel() self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True) self.termination_queue_name = self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender',", "Experiment.\" self.logger.info(msg) previous_decision = self.decision self.update_expression(self.stop_condition_type, self.decision) def stop_experiment_due_to_failed_sc_creation(self): \"\"\" This function sends", "+ 1 if counter % 10 == 0: counter = 0 numb_of_measured_configurations =", "self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation, args=()) self.thread.start() def stop_threads(self, ch, method, properties,", "of Stop Condition periodically according to user-defined repetition interval. \"\"\" counter = 0", "logging import os import threading import time from abc import ABC, abstractmethod import", "available for the experiment {self.experiment_id}\") if numb_of_measured_configurations > 0: search_space_size = \\ self.database.get_last_record_by_experiment_id(\"Search_space\",", "self.event_port = os.getenv(\"BRISE_EVENT_SERVICE_AMQP_PORT\") self.database = MongoDB(os.getenv(\"BRISE_DATABASE_HOST\"), os.getenv(\"BRISE_DATABASE_PORT\"), os.getenv(\"BRISE_DATABASE_NAME\"), os.getenv(\"BRISE_DATABASE_USER\"), os.getenv(\"BRISE_DATABASE_PASS\")) self.experiment_id = experiment_id", "str, decision: bool) -> None: \"\"\" This function sends event to Stop Condition", "port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='stop_experiment_queue', body=\"Stop condition is not", "variable 'self.decision' to True. 
:return: None \"\"\" def update_expression(self, stop_condition_type: str, decision: bool)", "instance of Stop Condition object \"\"\" super(EventServiceConnection, self).__init__() self.stop_condition: StopCondition = stop_condition self.connection", "def stop_threads(self, ch, method, properties, body): \"\"\" This function stops Stop Condition microservice.", "main node. It could be triggered only if Stop Condition initialization fails. \"\"\"", "(boolean) \"\"\" dictionary_dump = {\"experiment_id\": self.experiment_id, \"stop_condition_type\": stop_condition_type, \"decision\": decision } body =", "\"\"\" This function stops Stop Condition microservice. :param ch: pika.Channel :param method: pika.spec.Basic.GetOk", "break self.is_finish() if previous_decision != self.decision: msg = f\"{self.__class__.__name__} Stop Condition decision: \"", "thread run the functionality of Stop Condition (`self_evaluation` method). \"\"\" self.listen_thread = EventServiceConnection(self)", "\"\"\" counter = 0 listen_interval = self.repetition_interval/10 previous_decision = self.decision # for sending", "def start_threads(self): \"\"\" Start 2 threads. One thread listens event to shut down", "method: pika.spec.Basic.GetOk :param properties: pika.spec.BasicProperties :param body: empty \"\"\" self.listen_thread.stop() self.thread_is_active = False", "previous_decision = self.decision self.update_expression(self.stop_condition_type, self.decision) def stop_experiment_due_to_failed_sc_creation(self): \"\"\" This function sends stop_experiment message", "only after this timer ends. 
# This code decision is designed to accelerate", "the experiment {self.experiment_id}\") if numb_of_measured_configurations > 0: search_space_size = \\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if", "the update only when decision changes while self.thread_is_active: # time.sleep blocks thread execution", "thread execution for whole time specified in function argument # and stop message", "__init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id: str): self.event_host = os.getenv(\"BRISE_EVENT_SERVICE_HOST\") self.event_port = os.getenv(\"BRISE_EVENT_SERVICE_AMQP_PORT\")", "datetime import json import logging import os import threading import time from abc", "pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue', body=body) def", "self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The function for thread stop \"\"\" self._is_interrupted", "import MongoDB class StopCondition(ABC): def __init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id: str): self.event_host", "functionality, listening of queue with task result \"\"\" try: while not self._is_interrupted: self.consume_channel.connection.process_data_events(time_limit=1)", "= f\"{self.__class__.__name__} Stop Condition decision: \" \\ f\"{ 'stop' if self.decision else 'continue'}", "False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The function for thread stop \"\"\"", "Stop Condition was triggered. 
:param stop_condition_type: Stop Condition identificator :param decision: Stop Condition", "self.experiment_id = experiment_id self.stop_condition_type = stop_condition_parameters[\"Name\"] self.decision = False self.logger = logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval", "routing_key='stop_experiment_queue', body=\"Stop condition is not able to initialize.\") class EventServiceConnection(threading.Thread): \"\"\" This class", "Stop Condition. Second thread run the functionality of Stop Condition (`self_evaluation` method). \"\"\"", "= self.decision self.update_expression(self.stop_condition_type, self.decision) def stop_experiment_due_to_failed_sc_creation(self): \"\"\" This function sends stop_experiment message to", "in `self_evaluation` method with defined in Experiment Description period. When the Stop Condition", "and stop message from main-node could be delivered only after this timer ends.", "BRISE, it changes internal state of variable 'self.decision' to True. :return: None \"\"\"", ">= search_space_size: break self.is_finish() if previous_decision != self.decision: msg = f\"{self.__class__.__name__} Stop Condition", "Experiment Description period. When the Stop Condition is triggered to stop BRISE, it", "consumers functionality, listening of queue with task result \"\"\" try: while not self._is_interrupted:", "if Stop Condition initialization fails. 
\"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with", "= \\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >= search_space_size: break self.is_finish() if previous_decision !=", "> 0: search_space_size = \\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >= search_space_size: break self.is_finish()", "Condition was triggered. :param stop_condition_type: Stop Condition identificator :param decision: Stop Condition decision", "This function sends event to Stop Condition Validator with command to check StopConditionTriggerLogic", "self.stop_condition: StopCondition = stop_condition self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, port=self.stop_condition.event_port)) self.consume_channel = self.connection.channel() self.termination_result =", "\\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >= search_space_size: break self.is_finish() if previous_decision != self.decision:", "method). 
\"\"\" self.listen_thread = EventServiceConnection(self) self.listen_thread.start() self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation, args=())", "listening `stop_brise_components` queue for shutting down Stop Condition (in case of BRISE Experiment", "of entry to tasks results consumers functionality, listening of queue with task result", "0 try: numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError: self.logger.warning(f\"No Experiment state is", "entry to tasks results consumers functionality, listening of queue with task result \"\"\"", "Stop Condition should be overridden in this method. Later, this method will be", "is designed to accelerate stopping process. time.sleep(listen_interval) counter = counter + 1 if", "experiment_description: dict, experiment_id: str): self.event_host = os.getenv(\"BRISE_EVENT_SERVICE_HOST\") self.event_port = os.getenv(\"BRISE_EVENT_SERVICE_AMQP_PORT\") self.database = MongoDB(os.getenv(\"BRISE_DATABASE_HOST\"),", "from tools.mongo_dao import MongoDB class StopCondition(ABC): def __init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id:", "thread :param stop_condition: an instance of Stop Condition object \"\"\" super(EventServiceConnection, self).__init__() self.stop_condition:", "should be overridden in this method. Later, this method will be called in", "= False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The function for thread stop", "ends. # This code decision is designed to accelerate stopping process. 
time.sleep(listen_interval) counter", "function argument # and stop message from main-node could be delivered only after", "\"\"\" Point of entry to tasks results consumers functionality, listening of queue with", "as channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue', body=body) def self_evaluation(self): \"\"\" This function performs self-evaluation of", "if numb_of_measured_configurations > 0: search_space_size = \\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >= search_space_size:", "down Stop Condition (in case of BRISE Experiment termination). \"\"\" def __init__(self, stop_condition:", "os.getenv(\"BRISE_EVENT_SERVICE_HOST\") self.event_port = os.getenv(\"BRISE_EVENT_SERVICE_AMQP_PORT\") self.database = MongoDB(os.getenv(\"BRISE_DATABASE_HOST\"), os.getenv(\"BRISE_DATABASE_PORT\"), os.getenv(\"BRISE_DATABASE_NAME\"), os.getenv(\"BRISE_DATABASE_USER\"), os.getenv(\"BRISE_DATABASE_PASS\")) self.experiment_id =", "process. time.sleep(listen_interval) counter = counter + 1 if counter % 10 == 0:", "os.getenv(\"BRISE_DATABASE_USER\"), os.getenv(\"BRISE_DATABASE_PASS\")) self.experiment_id = experiment_id self.stop_condition_type = stop_condition_parameters[\"Name\"] self.decision = False self.logger =", "\"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='stop_experiment_queue',", "message to main node. 
It could be triggered only if Stop Condition initialization", "self.listen_thread.stop() self.thread_is_active = False @abstractmethod def is_finish(self): \"\"\" Main logic of Stop Condition", "TypeError: self.logger.warning(f\"No Experiment state is yet available for the experiment {self.experiment_id}\") if numb_of_measured_configurations", "thread stop \"\"\" self._is_interrupted = True def run(self): \"\"\" Point of entry to", "import ABC, abstractmethod import pika from tools.mongo_dao import MongoDB class StopCondition(ABC): def __init__(self,", "execution for whole time specified in function argument # and stop message from", "Stop Condition (in case of BRISE Experiment termination). \"\"\" def __init__(self, stop_condition: StopCondition):", "message from main-node could be delivered only after this timer ends. # This", "on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The function for thread stop \"\"\" self._is_interrupted = True", "exclusive=True) self.termination_queue_name = self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def", "\"decision\": decision } body = json.dumps(dictionary_dump) with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with", "\\ f\"{ 'stop' if self.decision else 'continue'} running Experiment.\" self.logger.info(msg) previous_decision = self.decision", "= self.consume_channel.queue_declare(queue='', exclusive=True) self.termination_queue_name = self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, 
auto_ack=True,", "ch, method, properties, body): \"\"\" This function stops Stop Condition microservice. :param ch:", "identificator :param decision: Stop Condition decision (boolean) \"\"\" dictionary_dump = {\"experiment_id\": self.experiment_id, \"stop_condition_type\":", "= self.decision # for sending the update only when decision changes while self.thread_is_active:", "import pika from tools.mongo_dao import MongoDB class StopCondition(ABC): def __init__(self, stop_condition_parameters: dict, experiment_description:", "True. :return: None \"\"\" def update_expression(self, stop_condition_type: str, decision: bool) -> None: \"\"\"", "update only when decision changes while self.thread_is_active: # time.sleep blocks thread execution for", "self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, port=self.stop_condition.event_port)) self.consume_channel = self.connection.channel() self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True) self.termination_queue_name =", "= self.connection.channel() self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True) self.termination_queue_name = self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted =", "from abc import ABC, abstractmethod import pika from tools.mongo_dao import MongoDB class StopCondition(ABC):", "This function stops Stop Condition microservice. :param ch: pika.Channel :param method: pika.spec.Basic.GetOk :param", "start_threads(self): \"\"\" Start 2 threads. One thread listens event to shut down Stop", "= 0 numb_of_measured_configurations = 0 try: numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError:", "function stops Stop Condition microservice. 
:param ch: pika.Channel :param method: pika.spec.Basic.GetOk :param properties:", "= EventServiceConnection(self) self.listen_thread.start() self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation, args=()) self.thread.start() def stop_threads(self,", "yet available for the experiment {self.experiment_id}\") if numb_of_measured_configurations > 0: search_space_size = \\", "queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The function for", "method will be called in `self_evaluation` method with defined in Experiment Description period.", "decision: Stop Condition decision (boolean) \"\"\" dictionary_dump = {\"experiment_id\": self.experiment_id, \"stop_condition_type\": stop_condition_type, \"decision\":", "Main logic of Stop Condition should be overridden in this method. Later, this", "\"\"\" self.listen_thread.stop() self.thread_is_active = False @abstractmethod def is_finish(self): \"\"\" Main logic of Stop", "to accelerate stopping process. time.sleep(listen_interval) counter = counter + 1 if counter %", "accelerate stopping process. time.sleep(listen_interval) counter = counter + 1 if counter % 10", "stop BRISE, it changes internal state of variable 'self.decision' to True. :return: None", "blocks thread execution for whole time specified in function argument # and stop", "datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def start_threads(self): \"\"\" Start 2 threads. 
One thread listens event", "False self.logger = logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def start_threads(self): \"\"\" Start", "body: empty \"\"\" self.listen_thread.stop() self.thread_is_active = False @abstractmethod def is_finish(self): \"\"\" Main logic", "run the functionality of Stop Condition (`self_evaluation` method). \"\"\" self.listen_thread = EventServiceConnection(self) self.listen_thread.start()", "1 if counter % 10 == 0: counter = 0 numb_of_measured_configurations = 0", "@abstractmethod def is_finish(self): \"\"\" Main logic of Stop Condition should be overridden in", "The function for thread stop \"\"\" self._is_interrupted = True def run(self): \"\"\" Point", "to tasks results consumers functionality, listening of queue with task result \"\"\" try:", "is responsible for listening `stop_brise_components` queue for shutting down Stop Condition (in case", "self.thread_is_active: # time.sleep blocks thread execution for whole time specified in function argument", "is triggered to stop BRISE, it changes internal state of variable 'self.decision' to", "0 numb_of_measured_configurations = 0 try: numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError: self.logger.warning(f\"No", "triggered. 
:param stop_condition_type: Stop Condition identificator :param decision: Stop Condition decision (boolean) \"\"\"", "self.listen_thread = EventServiceConnection(self) self.listen_thread.start() self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation, args=()) self.thread.start() def", "When the Stop Condition is triggered to stop BRISE, it changes internal state", "self.thread.start() def stop_threads(self, ch, method, properties, body): \"\"\" This function stops Stop Condition", "decision is designed to accelerate stopping process. time.sleep(listen_interval) counter = counter + 1", "Stop Condition initialization fails. \"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel()", "Condition microservice. :param ch: pika.Channel :param method: pika.spec.Basic.GetOk :param properties: pika.spec.BasicProperties :param body:", "state of variable 'self.decision' to True. :return: None \"\"\" def update_expression(self, stop_condition_type: str,", "# time.sleep blocks thread execution for whole time specified in function argument #", "be triggered only if Stop Condition initialization fails. 
class StopCondition(ABC):
    """
    Base class for BRISE Stop Conditions.

    A Stop Condition periodically inspects the state of a running Experiment
    (`self_evaluation`) and, when its decision changes, reports it to the
    Stop Condition Validator over the event service so the combined
    StopConditionTriggerLogic expression can be re-evaluated.
    """

    def __init__(self, stop_condition_parameters: dict, experiment_description: dict, experiment_id: str):
        """
        :param stop_condition_parameters: parameters of this Stop Condition; must contain a "Name" key.
        :param experiment_description: Experiment Description; its
               "StopConditionTriggerLogic"/"InspectionParameters" section defines the repetition period.
        :param experiment_id: identifier of the Experiment this Stop Condition observes.
        """
        self.event_host = os.getenv("BRISE_EVENT_SERVICE_HOST")
        self.event_port = os.getenv("BRISE_EVENT_SERVICE_AMQP_PORT")
        self.database = MongoDB(os.getenv("BRISE_DATABASE_HOST"),
                                os.getenv("BRISE_DATABASE_PORT"),
                                os.getenv("BRISE_DATABASE_NAME"),
                                os.getenv("BRISE_DATABASE_USER"),
                                os.getenv("BRISE_DATABASE_PASS"))
        self.experiment_id = experiment_id
        self.stop_condition_type = stop_condition_parameters["Name"]
        self.decision = False
        self.logger = logging.getLogger(stop_condition_parameters["Name"])
        # Repetition period converted to seconds from the user-defined time unit.
        self.repetition_interval = datetime.timedelta(**{
            experiment_description["StopConditionTriggerLogic"]["InspectionParameters"]["TimeUnit"]:
                experiment_description["StopConditionTriggerLogic"]["InspectionParameters"]["RepetitionPeriod"]
        }).total_seconds()
        # FIX: initialize thread-control state here. Previously these attributes
        # were created only inside `start_threads`, so `stop_threads` (a message
        # callback that may fire at any time) could raise AttributeError if a
        # termination event arrived before `start_threads` ran.
        self.listen_thread = None
        self.thread = None
        self.thread_is_active = False

    def start_threads(self):
        """
        Start 2 threads. One thread listens event to shut down Stop Condition.
        Second thread run the functionality of Stop Condition (`self_evaluation` method).
        """
        self.listen_thread = EventServiceConnection(self)
        self.listen_thread.start()
        self.thread_is_active = True
        self.thread = threading.Thread(target=self.self_evaluation, args=())
        self.thread.start()

    def stop_threads(self, ch, method, properties, body):
        """
        This function stops Stop Condition microservice.
        :param ch: pika.Channel
        :param method: pika.spec.Basic.GetOk
        :param properties: pika.spec.BasicProperties
        :param body: empty
        """
        # Guard against a termination event arriving before `start_threads`.
        if self.listen_thread is not None:
            self.listen_thread.stop()
        self.thread_is_active = False

    @abstractmethod
    def is_finish(self):
        """
        Main logic of Stop Condition should be overridden in this method.
        Later, this method will be called in `self_evaluation` method with defined
        in Experiment Description period.
        When the Stop Condition is triggered to stop BRISE, it changes internal
        state of variable 'self.decision' to True.
        :return: None
        """

    def update_expression(self, stop_condition_type: str, decision: bool) -> None:
        """
        This function sends event to Stop Condition Validator with command to check
        StopConditionTriggerLogic expression, since this particular Stop Condition was triggered.
        :param stop_condition_type: Stop Condition identificator
        :param decision: Stop Condition decision (boolean)
        """
        dictionary_dump = {"experiment_id": self.experiment_id,
                           "stop_condition_type": stop_condition_type,
                           "decision": decision
                           }
        body = json.dumps(dictionary_dump)
        with pika.BlockingConnection(
                pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection:
            with connection.channel() as channel:
                channel.basic_publish(exchange='',
                                      routing_key='check_stop_condition_expression_queue',
                                      body=body)

    def self_evaluation(self):
        """
        This function performs self-evaluation of Stop Condition periodically
        according to user-defined repetition interval.
        """
        counter = 0
        # Sleep in 1/10 slices of the repetition interval so a shutdown request
        # is noticed quickly instead of after a full period.
        listen_interval = self.repetition_interval / 10
        previous_decision = self.decision  # for sending the update only when decision changes
        while self.thread_is_active:
            # time.sleep blocks thread execution for whole time specified in function argument
            # and stop message from main-node could be delivered only after this timer ends.
            # This code decision is designed to accelerate stopping process.
            time.sleep(listen_interval)
            counter += 1
            if counter % 10 == 0:
                counter = 0
                numb_of_measured_configurations = 0
                try:
                    # NOTE(review): assumes the "Experiment_state" record carries a
                    # "Number_of_measured_configs" field — confirm against the DB schema.
                    numb_of_measured_configurations = \
                        self.database.get_last_record_by_experiment_id("Experiment_state",
                                                                      self.experiment_id)["Number_of_measured_configs"]
                except TypeError:
                    # get_last_record_by_experiment_id presumably returns None when
                    # no record exists yet, making the subscription raise TypeError.
                    self.logger.warning(f"No Experiment state is yet available for the experiment {self.experiment_id}")
                if numb_of_measured_configurations > 0:
                    search_space_size = \
                        self.database.get_last_record_by_experiment_id("Search_space",
                                                                      self.experiment_id)["Search_space_size"]
                    if numb_of_measured_configurations >= search_space_size:
                        # Search space exhausted — nothing left to evaluate.
                        break
                    self.is_finish()
                    if previous_decision != self.decision:
                        msg = f"{self.__class__.__name__} Stop Condition decision: " \
                              f"{ 'stop' if self.decision else 'continue'} running Experiment."
                        self.logger.info(msg)
                        previous_decision = self.decision
                        self.update_expression(self.stop_condition_type, self.decision)

    def stop_experiment_due_to_failed_sc_creation(self):
        """
        This function sends stop_experiment message to main node.
        It could be triggered only if Stop Condition initialization fails.
        """
        with pika.BlockingConnection(
                pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection:
            with connection.channel() as channel:
                channel.basic_publish(exchange='',
                                      routing_key='stop_experiment_queue',
                                      body="Stop condition is not able to initialize.")
class EventServiceConnection(threading.Thread):
    """
    Background consumer that listens the `stop_brise_components` queue and shuts
    the Stop Condition down when a BRISE Experiment termination event arrives.
    """

    def __init__(self, stop_condition: StopCondition):
        """
        The function for initializing consumer thread
        :param stop_condition: an instance of Stop Condition object
        """
        super().__init__()
        self.stop_condition: StopCondition = stop_condition
        broker_parameters = pika.ConnectionParameters(host=self.stop_condition.event_host,
                                                      port=self.stop_condition.event_port)
        self.connection = pika.BlockingConnection(broker_parameters)
        self.consume_channel = self.connection.channel()
        # Exclusive, server-named queue bound to the termination fan-out exchange.
        self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True)
        self.termination_queue_name = self.termination_result.method.queue
        self.consume_channel.queue_bind(exchange='brise_termination_sender',
                                        queue=self.termination_queue_name)
        self._is_interrupted = False
        self.consume_channel.basic_consume(queue=self.termination_queue_name,
                                           auto_ack=True,
                                           on_message_callback=self.stop_condition.stop_threads)

    def stop(self):
        """
        The function for thread stop
        """
        self._is_interrupted = True

    def run(self):
        """
        Point of entry to tasks results consumers functionality,
        listening of queue with task result
        """
        try:
            # Poll the broker until `stop` flips the interruption flag.
            while not self._is_interrupted:
                self.consume_channel.connection.process_data_events(time_limit=1)  # 1 second
        finally:
            if self.connection.is_open:
                self.connection.close()
Later, this method will be called in `self_evaluation` method with", "the Stop Condition is triggered to stop BRISE, it changes internal state of", "consumer thread :param stop_condition: an instance of Stop Condition object \"\"\" super(EventServiceConnection, self).__init__()", "= MongoDB(os.getenv(\"BRISE_DATABASE_HOST\"), os.getenv(\"BRISE_DATABASE_PORT\"), os.getenv(\"BRISE_DATABASE_NAME\"), os.getenv(\"BRISE_DATABASE_USER\"), os.getenv(\"BRISE_DATABASE_PASS\")) self.experiment_id = experiment_id self.stop_condition_type = stop_condition_parameters[\"Name\"] self.decision", "os.getenv(\"BRISE_DATABASE_NAME\"), os.getenv(\"BRISE_DATABASE_USER\"), os.getenv(\"BRISE_DATABASE_PASS\")) self.experiment_id = experiment_id self.stop_condition_type = stop_condition_parameters[\"Name\"] self.decision = False self.logger", "defined in Experiment Description period. When the Stop Condition is triggered to stop", "stop(self): \"\"\" The function for thread stop \"\"\" self._is_interrupted = True def run(self):", "decision: bool) -> None: \"\"\" This function sends event to Stop Condition Validator", "`stop_brise_components` queue for shutting down Stop Condition (in case of BRISE Experiment termination).", "of Stop Condition (`self_evaluation` method). \"\"\" self.listen_thread = EventServiceConnection(self) self.listen_thread.start() self.thread_is_active = True", "function for thread stop \"\"\" self._is_interrupted = True def run(self): \"\"\" Point of", "pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='stop_experiment_queue', body=\"Stop condition is", "this particular Stop Condition was triggered. :param stop_condition_type: Stop Condition identificator :param decision:", "body=body) def self_evaluation(self): \"\"\" This function performs self-evaluation of Stop Condition periodically according", "Condition initialization fails. 
\"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as", "\"\"\" self.listen_thread = EventServiceConnection(self) self.listen_thread.start() self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation, args=()) self.thread.start()", "connection.channel() as channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue', body=body) def self_evaluation(self): \"\"\" This function performs self-evaluation", "node. It could be triggered only if Stop Condition initialization fails. \"\"\" with", "super(EventServiceConnection, self).__init__() self.stop_condition: StopCondition = stop_condition self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, port=self.stop_condition.event_port)) self.consume_channel = self.connection.channel()", "initialization fails. \"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel:", "import json import logging import os import threading import time from abc import", "is_finish(self): \"\"\" Main logic of Stop Condition should be overridden in this method.", "decision changes while self.thread_is_active: # time.sleep blocks thread execution for whole time specified", "logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def start_threads(self): \"\"\" Start 2 threads. 
One", "routing_key='check_stop_condition_expression_queue', body=body) def self_evaluation(self): \"\"\" This function performs self-evaluation of Stop Condition periodically", "# and stop message from main-node could be delivered only after this timer", "0: counter = 0 numb_of_measured_configurations = 0 try: numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"]", "as channel: channel.basic_publish(exchange='', routing_key='stop_experiment_queue', body=\"Stop condition is not able to initialize.\") class EventServiceConnection(threading.Thread):", "self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The function", "numb_of_measured_configurations > 0: search_space_size = \\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >= search_space_size: break", "time.sleep blocks thread execution for whole time specified in function argument # and", "json.dumps(dictionary_dump) with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue',", "= \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError: self.logger.warning(f\"No Experiment state is yet available for", "\"\"\" self._is_interrupted = True def run(self): \"\"\" Point of entry to tasks results", "= stop_condition_parameters[\"Name\"] self.decision = False self.logger = logging.getLogger(stop_condition_parameters[\"Name\"]) 
self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds()", "method. Later, this method will be called in `self_evaluation` method with defined in", "connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue', body=body) def self_evaluation(self): \"\"\" This function", "self.thread = threading.Thread(target=self.self_evaluation, args=()) self.thread.start() def stop_threads(self, ch, method, properties, body): \"\"\" This", "= logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def start_threads(self): \"\"\" Start 2 threads.", "decision: \" \\ f\"{ 'stop' if self.decision else 'continue'} running Experiment.\" self.logger.info(msg) previous_decision", "10 == 0: counter = 0 numb_of_measured_configurations = 0 try: numb_of_measured_configurations = \\", "self.listen_thread.start() self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation, args=()) self.thread.start() def stop_threads(self, ch, method,", "StopCondition = stop_condition self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, port=self.stop_condition.event_port)) self.consume_channel = self.connection.channel() self.termination_result = self.consume_channel.queue_declare(queue='',", "\"\"\" The function for thread stop \"\"\" self._is_interrupted = True def run(self): \"\"\"", "is yet available for the experiment {self.experiment_id}\") if numb_of_measured_configurations > 0: 
search_space_size =", "listening of queue with task result \"\"\" try: while not self._is_interrupted: self.consume_channel.connection.process_data_events(time_limit=1) #", "microservice. :param ch: pika.Channel :param method: pika.spec.Basic.GetOk :param properties: pika.spec.BasicProperties :param body: empty", "{self.experiment_id}\") if numb_of_measured_configurations > 0: search_space_size = \\ self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >=", "of queue with task result \"\"\" try: while not self._is_interrupted: self.consume_channel.connection.process_data_events(time_limit=1) # 1", "\"\"\" def __init__(self, stop_condition: StopCondition): \"\"\" The function for initializing consumer thread :param", "self.stop_condition_type = stop_condition_parameters[\"Name\"] self.decision = False self.logger = logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]:", "if self.decision else 'continue'} running Experiment.\" self.logger.info(msg) previous_decision = self.decision self.update_expression(self.stop_condition_type, self.decision) def", "overridden in this method. Later, this method will be called in `self_evaluation` method", "could be delivered only after this timer ends. # This code decision is", "= True def run(self): \"\"\" Point of entry to tasks results consumers functionality,", "counter = 0 listen_interval = self.repetition_interval/10 previous_decision = self.decision # for sending the", "self.experiment_id, \"stop_condition_type\": stop_condition_type, \"decision\": decision } body = json.dumps(dictionary_dump) with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port))", "was triggered. 
:param stop_condition_type: Stop Condition identificator :param decision: Stop Condition decision (boolean)", "triggered only if Stop Condition initialization fails. \"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as", "could be triggered only if Stop Condition initialization fails. \"\"\" with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host,", "Stop Condition decision (boolean) \"\"\" dictionary_dump = {\"experiment_id\": self.experiment_id, \"stop_condition_type\": stop_condition_type, \"decision\": decision", "with pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='check_stop_condition_expression_queue', body=body)", "counter = counter + 1 if counter % 10 == 0: counter =", "it changes internal state of variable 'self.decision' to True. :return: None \"\"\" def", "(`self_evaluation` method). \"\"\" self.listen_thread = EventServiceConnection(self) self.listen_thread.start() self.thread_is_active = True self.thread = threading.Thread(target=self.self_evaluation,", "'self.decision' to True. :return: None \"\"\" def update_expression(self, stop_condition_type: str, decision: bool) ->", "stop_condition_type: Stop Condition identificator :param decision: Stop Condition decision (boolean) \"\"\" dictionary_dump =", "since this particular Stop Condition was triggered. 
:param stop_condition_type: Stop Condition identificator :param", "Condition decision: \" \\ f\"{ 'stop' if self.decision else 'continue'} running Experiment.\" self.logger.info(msg)", "Condition Validator with command to check StopConditionTriggerLogic expression, since this particular Stop Condition", "self.database.get_last_record_by_experiment_id(\"Search_space\", self.experiment_id)[\"Search_space_size\"] if numb_of_measured_configurations >= search_space_size: break self.is_finish() if previous_decision != self.decision: msg", "listens event to shut down Stop Condition. Second thread run the functionality of", "def stop_experiment_due_to_failed_sc_creation(self): \"\"\" This function sends stop_experiment message to main node. It could", "EventServiceConnection(threading.Thread): \"\"\" This class is responsible for listening `stop_brise_components` queue for shutting down", "stops Stop Condition microservice. :param ch: pika.Channel :param method: pika.spec.Basic.GetOk :param properties: pika.spec.BasicProperties", ":param stop_condition_type: Stop Condition identificator :param decision: Stop Condition decision (boolean) \"\"\" dictionary_dump", "!= self.decision: msg = f\"{self.__class__.__name__} Stop Condition decision: \" \\ f\"{ 'stop' if", "interval. 
\"\"\" counter = 0 listen_interval = self.repetition_interval/10 previous_decision = self.decision # for", "pika.BlockingConnection( pika.ConnectionParameters(host=self.event_host, port=self.event_port)) as connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='stop_experiment_queue', body=\"Stop condition", "stop_condition_parameters[\"Name\"] self.decision = False self.logger = logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def", "ch: pika.Channel :param method: pika.spec.Basic.GetOk :param properties: pika.spec.BasicProperties :param body: empty \"\"\" self.listen_thread.stop()", "state is yet available for the experiment {self.experiment_id}\") if numb_of_measured_configurations > 0: search_space_size", "is not able to initialize.\") class EventServiceConnection(threading.Thread): \"\"\" This class is responsible for", "2 threads. One thread listens event to shut down Stop Condition. Second thread", "experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def start_threads(self): \"\"\" Start 2 threads. One thread listens event to", "from main-node could be delivered only after this timer ends. # This code", "of Stop Condition should be overridden in this method. 
Later, this method will", "None: \"\"\" This function sends event to Stop Condition Validator with command to", "os.getenv(\"BRISE_DATABASE_PORT\"), os.getenv(\"BRISE_DATABASE_NAME\"), os.getenv(\"BRISE_DATABASE_USER\"), os.getenv(\"BRISE_DATABASE_PASS\")) self.experiment_id = experiment_id self.stop_condition_type = stop_condition_parameters[\"Name\"] self.decision = False", "thread listens event to shut down Stop Condition. Second thread run the functionality", "try: numb_of_measured_configurations = \\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError: self.logger.warning(f\"No Experiment state is yet", "threading import time from abc import ABC, abstractmethod import pika from tools.mongo_dao import", "abc import ABC, abstractmethod import pika from tools.mongo_dao import MongoDB class StopCondition(ABC): def", "\"\"\" The function for initializing consumer thread :param stop_condition: an instance of Stop", "object \"\"\" super(EventServiceConnection, self).__init__() self.stop_condition: StopCondition = stop_condition self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, port=self.stop_condition.event_port)) self.consume_channel", "json import logging import os import threading import time from abc import ABC,", "self.logger.info(msg) previous_decision = self.decision self.update_expression(self.stop_condition_type, self.decision) def stop_experiment_due_to_failed_sc_creation(self): \"\"\" This function sends stop_experiment", "pika.Channel :param method: pika.spec.Basic.GetOk :param properties: pika.spec.BasicProperties :param body: empty \"\"\" self.listen_thread.stop() self.thread_is_active", "os import threading import time from abc import ABC, abstractmethod import pika from", "repetition interval. 
\"\"\" counter = 0 listen_interval = self.repetition_interval/10 previous_decision = self.decision #", "args=()) self.thread.start() def stop_threads(self, ch, method, properties, body): \"\"\" This function stops Stop", "for listening `stop_brise_components` queue for shutting down Stop Condition (in case of BRISE", "import logging import os import threading import time from abc import ABC, abstractmethod", "to shut down Stop Condition. Second thread run the functionality of Stop Condition", "0 listen_interval = self.repetition_interval/10 previous_decision = self.decision # for sending the update only", "\\ self.database.get_last_record_by_experiment_id(\"Experiment_state\", self.experiment_id)[\"Number_of_measured_configs\"] except TypeError: self.logger.warning(f\"No Experiment state is yet available for the", "termination). \"\"\" def __init__(self, stop_condition: StopCondition): \"\"\" The function for initializing consumer thread", "= self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\"", "bool) -> None: \"\"\" This function sends event to Stop Condition Validator with", "when decision changes while self.thread_is_active: # time.sleep blocks thread execution for whole time", "\"\"\" super(EventServiceConnection, self).__init__() self.stop_condition: StopCondition = stop_condition self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.stop_condition.event_host, port=self.stop_condition.event_port)) self.consume_channel =", "connection: with connection.channel() as channel: channel.basic_publish(exchange='', routing_key='stop_experiment_queue', body=\"Stop condition is not able to", "event to shut down Stop Condition. 
Second thread run the functionality of Stop", "running Experiment.\" self.logger.info(msg) previous_decision = self.decision self.update_expression(self.stop_condition_type, self.decision) def stop_experiment_due_to_failed_sc_creation(self): \"\"\" This function", "It could be triggered only if Stop Condition initialization fails. \"\"\" with pika.BlockingConnection(", "Condition identificator :param decision: Stop Condition decision (boolean) \"\"\" dictionary_dump = {\"experiment_id\": self.experiment_id,", "pika.spec.BasicProperties :param body: empty \"\"\" self.listen_thread.stop() self.thread_is_active = False @abstractmethod def is_finish(self): \"\"\"", "self.logger = logging.getLogger(stop_condition_parameters[\"Name\"]) self.repetition_interval = datetime.timedelta(**{ experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"TimeUnit\"]: experiment_description[\"StopConditionTriggerLogic\"][\"InspectionParameters\"][\"RepetitionPeriod\"]}).total_seconds() def start_threads(self): \"\"\" Start 2", "logic of Stop Condition should be overridden in this method. 
Later, this method", "abstractmethod import pika from tools.mongo_dao import MongoDB class StopCondition(ABC): def __init__(self, stop_condition_parameters: dict,", "for sending the update only when decision changes while self.thread_is_active: # time.sleep blocks", "self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name) self._is_interrupted = False self.consume_channel.basic_consume(queue=self.termination_queue_name, auto_ack=True, on_message_callback=self.stop_condition.stop_threads) def stop(self): \"\"\" The", "port=self.stop_condition.event_port)) self.consume_channel = self.connection.channel() self.termination_result = self.consume_channel.queue_declare(queue='', exclusive=True) self.termination_queue_name = self.termination_result.method.queue self.consume_channel.queue_bind(exchange='brise_termination_sender', queue=self.termination_queue_name)", "this method. Later, this method will be called in `self_evaluation` method with defined" ]
[ "== 1: f_id = random.randint(0,8) #print f_id query = query + fields[f_id] else", "print get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print", "get_category_code(): code = random.randint(1,33) return code \"\"\" print get_lang() print get_segment_id() print get_contract_id()", "\",\"In Network \"] days = [\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) +", "print get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc() print", "get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs = [] lim = random.randint(200,800) while", "while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f =", "= \"Select \" no_f = random.randint(1,9) #print no_f if no_f == 9 :", "print get_lang() print get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level() print", "+ str(lim) if query not in qrs: qrs.append(query) return qrs def gen_rand_query(noq): count", "\" no_f = random.randint(1,9) #print no_f if no_f == 9 : query =", ": f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query = query + fields[f_id[i]]", "range(1,noq): #print \"query number:\" + str(count) query = setquery() #count+=1 #end_time = time.time()-start_time", "\"query number:\" + str(count) query = setquery() #count+=1 #end_time = time.time()-start_time #print str(end_time)", "from mytable LIMIT \" + str(lim) if query not in qrs: qrs.append(query) return", "qrs def gen_rand_query(noq): count = 1 #start_time = time.time() for j in range(1,noq):", "get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return seg_id def get_contract_id(): contract_id = 
random.randint(28,3361)", "+ fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query =", "return tier def get_sentence_sort_order(): order = random.randint(1,2564) return order def get_category_code(): code =", "import random import time def get_lang(): Lang = [\"English\",\"Spanish\"] id = random.randint(0,1) print", "contract_id = \"0\"+str(contract_id) if contract_id >27 and contract_id <100: contract_id = \"00\" +", "[] lim = random.randint(200,800) while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query =", "= random.randint(1,103214) #print seg_id return seg_id def get_contract_id(): contract_id = random.randint(28,3361) if contract_id", "+ fields[f_id[i+1]] query = query + \" from mytable LIMIT \" + str(lim)", "get_lang(): Lang = [\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id():", "def get_contract_id(): contract_id = random.randint(28,3361) if contract_id > 99 and contract_id < 1000:", "random.randint(1,33) return code \"\"\" print get_lang() print get_segment_id() print get_contract_id() print get_plan_id() print", "\",\"In Network gap \",\"In Network \"] days = [\"30 days\",\"60 days\",\"90 days\"] tier", "query = query + \"*\" elif no_f == 1: f_id = random.randint(0,8) #print", "random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query = query + fields[f_id[i]] + \",\" query", "f_id query = query + fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for i", "query + \" from mytable LIMIT \" + str(lim) if query not in", "= 1 #start_time = time.time() for j in range(1,noq): #print \"query number:\" +", ": query = query + \"*\" elif no_f == 1: f_id = random.randint(0,8)", "for i in range(0,no_f-1): query = query + fields[f_id[i]] + \",\" query =", "i in range(0,no_f-1): query = query + fields[f_id[i]] + \",\" query = 
query", ">27 and contract_id <100: contract_id = \"00\" + str(contract_id) #print contract_id return contract_id", "random import time def get_lang(): Lang = [\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id])", "return order def get_category_code(): code = random.randint(1,33) return code \"\"\" print get_lang() print", "contract_id return contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year(): return 2013 def get_tier_level():", "contract_id = \"00\" + str(contract_id) #print contract_id return contract_id def get_plan_id(): return random.randint(1,220)", "setquery(noq): qrs = [] lim = random.randint(200,800) while len(qrs) < noq: fields =", "str(contract_id) #print contract_id return contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year(): return 2013", "str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order = random.randint(1,2564) return order def", "= random.randint(200,800) while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \"", "#print seg_id return seg_id def get_contract_id(): contract_id = random.randint(28,3361) if contract_id > 99", "contract_id > 99 and contract_id < 1000: contract_id = \"0\"+str(contract_id) if contract_id >27", "print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs = [] lim", "in range(1,noq): #print \"query number:\" + str(count) query = setquery() #count+=1 #end_time =", "Lang = [\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id", "j in range(1,noq): #print \"query number:\" + str(count) query = setquery() #count+=1 #end_time", "1: f_id = random.randint(0,8) #print f_id query = query + fields[f_id] else :", "< noq: fields = 
[\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f = random.randint(1,9) #print", "= random.randint(1,2564) return order def get_category_code(): code = random.randint(1,33) return code \"\"\" print", "def setquery(noq): qrs = [] lim = random.randint(200,800) while len(qrs) < noq: fields", "elif no_f == 1: f_id = random.randint(0,8) #print f_id query = query +", "order gap \",\"Mail order \",\"In Network gap \",\"In Network \"] days = [\"30", "\"] days = [\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return", "query not in qrs: qrs.append(query) return qrs def gen_rand_query(noq): count = 1 #start_time", "and contract_id <100: contract_id = \"00\" + str(contract_id) #print contract_id return contract_id def", "def gen_rand_query(noq): count = 1 #start_time = time.time() for j in range(1,noq): #print", "query + fields[f_id[i]] + \",\" query = query + fields[f_id[i+1]] query = query", "= random.randint(28,3361) if contract_id > 99 and contract_id < 1000: contract_id = \"0\"+str(contract_id)", "len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f = random.randint(1,9)", "if no_f == 9 : query = query + \"*\" elif no_f ==", "if contract_id >27 and contract_id <100: contract_id = \"00\" + str(contract_id) #print contract_id", "#print f_id query = query + fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for", "qrs = [] lim = random.randint(200,800) while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"]", "2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail 
order gap \",\"Mail", "= \"00\" + str(contract_id) #print contract_id return contract_id def get_plan_id(): return random.randint(1,220) def", "get_contract_id(): contract_id = random.randint(28,3361) if contract_id > 99 and contract_id < 1000: contract_id", "print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs =", "\"*\" elif no_f == 1: f_id = random.randint(0,8) #print f_id query = query", "__author__ = 'Nispand' import random import time def get_lang(): Lang = [\"English\",\"Spanish\"] id", "def get_category_code(): code = random.randint(1,33) return code \"\"\" print get_lang() print get_segment_id() print", "= [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f = random.randint(1,9) #print no_f if no_f", "qrs.append(query) return qrs def gen_rand_query(noq): count = 1 #start_time = time.time() for j", "\"\"\" def setquery(noq): qrs = [] lim = random.randint(200,800) while len(qrs) < noq:", "def get_sentence_sort_order(): order = random.randint(1,2564) return order def get_category_code(): code = random.randint(1,33) return", "qrs: qrs.append(query) return qrs def gen_rand_query(noq): count = 1 #start_time = time.time() for", "def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return seg_id def get_contract_id(): contract_id =", "[\"Mail order gap \",\"Mail order \",\"In Network gap \",\"In Network \"] days =", "else : f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query = query +", "= random.randint(1,9) #print no_f if no_f == 9 : query = query +", "return code \"\"\" print get_lang() print get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year()", "query = query + fields[f_id[i+1]] query = query + \" from mytable LIMIT", "def get_lang(): Lang = [\"English\",\"Spanish\"] id = 
random.randint(0,1) print type(Lang[id]) return Lang[id] def", "get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs", "[\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214)", "= 'Nispand' import random import time def get_lang(): Lang = [\"English\",\"Spanish\"] id =", "get_lang() print get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc()", "= query + fields[f_id[i+1]] query = query + \" from mytable LIMIT \"", "query + fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query", "<100: contract_id = \"00\" + str(contract_id) #print contract_id return contract_id def get_plan_id(): return", "+ \"*\" elif no_f == 1: f_id = random.randint(0,8) #print f_id query =", "= [] lim = random.randint(200,800) while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query", "99 and contract_id < 1000: contract_id = \"0\"+str(contract_id) if contract_id >27 and contract_id", "print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return seg_id", "order = random.randint(1,2564) return order def get_category_code(): code = random.randint(1,33) return code \"\"\"", "print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs = [] lim = random.randint(200,800)", "gen_rand_query(noq): count = 1 #start_time = time.time() for j in range(1,noq): #print \"query", "count = 1 #start_time = time.time() for j in range(1,noq): #print \"query number:\"", "Network \"] days = [\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + 
str(days[random.randint(0,2)])", "str(count) query = setquery() #count+=1 #end_time = time.time()-start_time #print str(end_time) + \"seconds\" return", "no_f if no_f == 9 : query = query + \"*\" elif no_f", "return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return seg_id def get_contract_id():", "range(0,no_f-1): query = query + fields[f_id[i]] + \",\" query = query + fields[f_id[i+1]]", "= [\"Mail order gap \",\"Mail order \",\"In Network gap \",\"In Network \"] days", "= random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query = query + fields[f_id[i]] + \",\"", "return seg_id def get_contract_id(): contract_id = random.randint(28,3361) if contract_id > 99 and contract_id", "days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order =", "get_contract_year(): return 2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail order", "get_sentence_sort_order(): order = random.randint(1,2564) return order def get_category_code(): code = random.randint(1,33) return code", "no_f == 1: f_id = random.randint(0,8) #print f_id query = query + fields[f_id]", "+ fields[f_id[i]] + \",\" query = query + fields[f_id[i+1]] query = query +", "[\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order():", "get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs = []", "get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail order gap \",\"Mail order \",\"In", "< 1000: contract_id = \"0\"+str(contract_id) if contract_id >27 and contract_id <100: contract_id =", "contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year(): return 2013 def get_tier_level(): return 
random.randint(1,978)", "\",\" query = query + fields[f_id[i+1]] query = query + \" from mytable", "+ str(contract_id) #print contract_id return contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year(): return", "str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order = random.randint(1,2564) return order def get_category_code(): code", "= query + fields[f_id[i]] + \",\" query = query + fields[f_id[i+1]] query =", "\" from mytable LIMIT \" + str(lim) if query not in qrs: qrs.append(query)", "query = \"Select \" no_f = random.randint(1,9) #print no_f if no_f == 9", "not in qrs: qrs.append(query) return qrs def gen_rand_query(noq): count = 1 #start_time =", "#print \"query number:\" + str(count) query = setquery() #count+=1 #end_time = time.time()-start_time #print", "\" + str(lim) if query not in qrs: qrs.append(query) return qrs def gen_rand_query(noq):", "mytable LIMIT \" + str(lim) if query not in qrs: qrs.append(query) return qrs", "get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq): qrs = [] lim =", "= \"0\"+str(contract_id) if contract_id >27 and contract_id <100: contract_id = \"00\" + str(contract_id)", "\"0\"+str(contract_id) if contract_id >27 and contract_id <100: contract_id = \"00\" + str(contract_id) #print", "no_f == 9 : query = query + \"*\" elif no_f == 1:", "Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return seg_id def get_contract_id(): contract_id", "get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order()", "seg_id def get_contract_id(): contract_id = random.randint(28,3361) if contract_id > 99 and contract_id <", "'Nispand' import random import time def get_lang(): Lang = [\"English\",\"Spanish\"] id = random.randint(0,1)", "noq: fields = 
[\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f = random.randint(1,9) #print no_f", "contract_id = random.randint(28,3361) if contract_id > 99 and contract_id < 1000: contract_id =", "print get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\"", "if contract_id > 99 and contract_id < 1000: contract_id = \"0\"+str(contract_id) if contract_id", "id = random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print", "get_category_code() \"\"\" def setquery(noq): qrs = [] lim = random.randint(200,800) while len(qrs) <", "fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f = random.randint(1,9) #print no_f if", "query = query + fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for i in", "= random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id", "contract_id <100: contract_id = \"00\" + str(contract_id) #print contract_id return contract_id def get_plan_id():", "= query + fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1):", "= query + \" from mytable LIMIT \" + str(lim) if query not", "order def get_category_code(): code = random.randint(1,33) return code \"\"\" print get_lang() print get_segment_id()", "code \"\"\" print get_lang() print get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year() print", "\"00\" + str(contract_id) #print contract_id return contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year():", "== 9 : query = query + \"*\" elif no_f == 1: f_id", "return qrs def 
gen_rand_query(noq): count = 1 #start_time = time.time() for j in", "print get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def setquery(noq):", "= time.time() for j in range(1,noq): #print \"query number:\" + str(count) query =", "and contract_id < 1000: contract_id = \"0\"+str(contract_id) if contract_id >27 and contract_id <100:", "+ str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order = random.randint(1,2564) return order def get_category_code():", "\"Select \" no_f = random.randint(1,9) #print no_f if no_f == 9 : query", "fields[f_id] else : f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query = query", "return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail order gap \",\"Mail order \",\"In Network", "\"\"\" print get_lang() print get_segment_id() print get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level()", "if query not in qrs: qrs.append(query) return qrs def gen_rand_query(noq): count = 1", "contract_id < 1000: contract_id = \"0\"+str(contract_id) if contract_id >27 and contract_id <100: contract_id", "days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order", "tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order = random.randint(1,2564) return", "gap \",\"In Network \"] days = [\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)])", "1000: contract_id = \"0\"+str(contract_id) if contract_id >27 and contract_id <100: contract_id = \"00\"", "str(lim) if query not in qrs: qrs.append(query) return qrs def gen_rand_query(noq): count =", "= query + \"*\" elif no_f == 1: f_id = random.randint(0,8) #print f_id", "= random.randint(0,8) #print f_id query = query + fields[f_id] else : 
f_id =", "random.randint(0,8) #print f_id query = query + fields[f_id] else : f_id = random.sample(range(0,no_f),no_f)", "import time def get_lang(): Lang = [\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id]) return", "query = query + fields[f_id[i]] + \",\" query = query + fields[f_id[i+1]] query", "in qrs: qrs.append(query) return qrs def gen_rand_query(noq): count = 1 #start_time = time.time()", "query + fields[f_id[i+1]] query = query + \" from mytable LIMIT \" +", "number:\" + str(count) query = setquery() #count+=1 #end_time = time.time()-start_time #print str(end_time) +", "days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order = random.randint(1,2564)", "[\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f = random.randint(1,9) #print no_f if no_f ==", "9 : query = query + \"*\" elif no_f == 1: f_id =", "1 #start_time = time.time() for j in range(1,noq): #print \"query number:\" + str(count)", "random.randint(1,103214) #print seg_id return seg_id def get_contract_id(): contract_id = random.randint(28,3361) if contract_id >", "type(Lang[id]) return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return seg_id def", "gap \",\"Mail order \",\"In Network gap \",\"In Network \"] days = [\"30 days\",\"60", "contract_id >27 and contract_id <100: contract_id = \"00\" + str(contract_id) #print contract_id return", "= [\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def", "no_f = random.randint(1,9) #print no_f if no_f == 9 : query = query", "random.randint(1,9) #print no_f if no_f == 9 : query = query + \"*\"", "in range(0,no_f-1): query = query + fields[f_id[i]] + \",\" query = query +", "LIMIT \" + str(lim) if query not in qrs: 
qrs.append(query) return qrs def", "random.randint(1,220) def get_contract_year(): return 2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values =", "query + \"*\" elif no_f == 1: f_id = random.randint(0,8) #print f_id query", "random.randint(1,2564) return order def get_category_code(): code = random.randint(1,33) return code \"\"\" print get_lang()", "random.randint(28,3361) if contract_id > 99 and contract_id < 1000: contract_id = \"0\"+str(contract_id) if", "fields[f_id[i]] + \",\" query = query + fields[f_id[i+1]] query = query + \"", "random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail order gap \",\"Mail order \",\"In Network gap", "= str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier def get_sentence_sort_order(): order = random.randint(1,2564) return order", "tier def get_sentence_sort_order(): order = random.randint(1,2564) return order def get_category_code(): code = random.randint(1,33)", "query = query + \" from mytable LIMIT \" + str(lim) if query", "for j in range(1,noq): #print \"query number:\" + str(count) query = setquery() #count+=1", "get_tier_type_desc(): str_values = [\"Mail order gap \",\"Mail order \",\"In Network gap \",\"In Network", "#print no_f if no_f == 9 : query = query + \"*\" elif", "time def get_lang(): Lang = [\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id]) return Lang[id]", "random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id = random.randint(1,103214) #print seg_id return", "Network gap \",\"In Network \"] days = [\"30 days\",\"60 days\",\"90 days\"] tier =", "f_id = random.randint(0,8) #print f_id query = query + fields[f_id] else : f_id", "#start_time = time.time() for j in range(1,noq): #print \"query number:\" + str(count) query", "str_values = [\"Mail order gap \",\"Mail order \",\"In Network gap \",\"In Network \"]", "get_plan_id(): return random.randint(1,220) def 
get_contract_year(): return 2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc():", "return random.randint(1,220) def get_contract_year(): return 2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values", "order \",\"In Network gap \",\"In Network \"] days = [\"30 days\",\"60 days\",\"90 days\"]", "lim = random.randint(200,800) while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select", "def get_contract_year(): return 2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail", "code = random.randint(1,33) return code \"\"\" print get_lang() print get_segment_id() print get_contract_id() print", "+ \" from mytable LIMIT \" + str(lim) if query not in qrs:", "query = setquery() #count+=1 #end_time = time.time()-start_time #print str(end_time) + \"seconds\" return query", "return contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year(): return 2013 def get_tier_level(): return", "get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code() \"\"\" def", "seg_id = random.randint(1,103214) #print seg_id return seg_id def get_contract_id(): contract_id = random.randint(28,3361) if", "\",\"Mail order \",\"In Network gap \",\"In Network \"] days = [\"30 days\",\"60 days\",\"90", "return 2013 def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail order gap", "+ str(count) query = setquery() #count+=1 #end_time = time.time()-start_time #print str(end_time) + \"seconds\"", "days = [\"30 days\",\"60 days\",\"90 days\"] tier = str(str_values[random.randint(0,3)]) + str(days[random.randint(0,2)]) return tier", "time.time() for j in range(1,noq): #print \"query number:\" + str(count) 
query = setquery()", "get_contract_id() print get_plan_id() print get_contract_year() print get_tier_level() print get_tier_type_desc() print get_sentence_sort_order() print get_category_code()", "> 99 and contract_id < 1000: contract_id = \"0\"+str(contract_id) if contract_id >27 and", "def get_tier_level(): return random.randint(1,978) def get_tier_type_desc(): str_values = [\"Mail order gap \",\"Mail order", "seg_id return seg_id def get_contract_id(): contract_id = random.randint(28,3361) if contract_id > 99 and", "def get_plan_id(): return random.randint(1,220) def get_contract_year(): return 2013 def get_tier_level(): return random.randint(1,978) def", "def get_tier_type_desc(): str_values = [\"Mail order gap \",\"Mail order \",\"In Network gap \",\"In", "= random.randint(1,33) return code \"\"\" print get_lang() print get_segment_id() print get_contract_id() print get_plan_id()", "+ \",\" query = query + fields[f_id[i+1]] query = query + \" from", "fields[f_id[i+1]] query = query + \" from mytable LIMIT \" + str(lim) if", "f_id = random.sample(range(0,no_f),no_f) for i in range(0,no_f-1): query = query + fields[f_id[i]] +", "= [\"English\",\"Spanish\"] id = random.randint(0,1) print type(Lang[id]) return Lang[id] def get_segment_id(): seg_id =", "random.randint(200,800) while len(qrs) < noq: fields = [\"Lang\",\"segment_id\",\"contract_id\",\"plan_id\",\"contract_year\",\"tier_level\",\"tier_type_desc\",\"sentences_sort_order\",\"category_code\"] query = \"Select \" no_f", "print get_category_code() \"\"\" def setquery(noq): qrs = [] lim = random.randint(200,800) while len(qrs)", "#print contract_id return contract_id def get_plan_id(): return random.randint(1,220) def get_contract_year(): return 2013 def" ]
[ "= load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number of total classes num_classes", "0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices", "* 1000 class_duration = encode_duration_class(duration) # Encode it! feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic,", "resetting sequence class Session(): def __init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx =", "self.ctx.elog('The given model was trained with a different ' 'amount of classes: given", "# Percentage of chance for resetting sequence class Session(): def __init__(self, ctx, voice,", "in range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples'", "detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based on amount of onsets self._density", "= self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given model was trained with a", "RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC analysis') else: # Calculate RMS", "self._voice.version == 1 or class_sound != SILENCE_CLASS: # Find closest sound to this", "# Calculate number of total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes =", "2: smiley = '☺' if wav else '☹' self.ctx.vlog('{} find sound (class={}, '", "# Decode class back into sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class,", "Only show this when able to work with dynamics etc. 
if self._voice.version ==", "self._play_thread.daemon = True self._lock = threading.Lock() # Prepare playing logic self._sequence = []", "1 # Remove oldest event from sequence queue self._sequence = self._sequence[1:] if random.random()", "MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector,", "None: reference_voice = voice else: voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points)", "Prepare audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume'))", "self.ctx.log('Loading ..') # Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True", "= self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check", "from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check", "self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not do anything when this is", "self._sequence.append(feature_vector) # Check for too long sequences, cut it if necessary penalty =", "threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False def run(self): while", "to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this", "MAX_DENSITY_ONSETS = 10 # How many offsets for max density PLAY_DELAY_EXP = 5", "audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, 
device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except", "time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) > 1: # Get next wav", "os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number", "len(frames), np.max(get_db(frames)))) # Detect onsets in available data onsets, _ = detect_onsets(frames, self.samplerate,", "available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based", "min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts when possible slices", "Play all possible subsequences min_index = max_index - self.seq_len if min_index < 0:", "to work with dynamics etc. if self._voice.version == 2: smiley = '☺' if", "to our sequence queue self._sequence.append(feature_vector) # Check for too long sequences, cut it", "len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else:", "5 # Exponent for maximum density delay RESET_PROPABILITY = 0.1 # Percentage of", "reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations =", "SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes)", "the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class", "analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item()", "0.1 # Check .wav queue interval (in seconds) 
MAX_DENSITY_ONSETS = 10 # How", "= y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog(", "range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name,", "kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters can be changed", "KMeans import librosa import numpy as np import tensorflow as tf from tomomibot.audio", "from point class_sound = self._kmeans.predict([point])[0] # Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms)", "as np import tensorflow as tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent,", "CHECK_WAV_INTERVAL = 0.1 # Check .wav queue interval (in seconds) MAX_DENSITY_ONSETS = 10", "self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon =", "duration = len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode it!", "True self._lock = threading.Lock() # Prepare playing logic self._sequence = [] self._wavs =", "np.float32(np.max(rms_data)).item() # Project point into given voice PCA space point = self._voice.project([mfcc])[0].flatten() #", "Predict next action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break", "etc. 
if self._voice.version == 2: smiley = '☺' if wav else '☹' self.ctx.vlog('{}", "slices for y in slices: y_slice = y[0] # Calculate MFCCs try: mfcc", "ready for being used in another thread model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(),", "return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available data", "frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames),", "10 # How many offsets for max density PLAY_DELAY_EXP = 5 # Exponent", "self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len", "chance for resetting sequence class Session(): def __init__(self, ctx, voice, model, reference_voice=None, **kwargs):", "play from queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={},", "1000 class_duration = encode_duration_class(duration) # Encode it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration,", "load_model from sklearn.cluster import KMeans import librosa import numpy as np import tensorflow", "MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data", "self._temperature = kwargs.get('temperature') # Prepare audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'),", "else: voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes", "voice and k-means clustering if reference_voice is None: reference_voice = voice else: voice.fit(reference_voice)", "= np.float32(np.max(rms_data)).item() # Project point into given voice PCA space point = self._voice.project([mfcc])[0].flatten()", "density based on amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS", "False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self):", "= self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this when able to work", "tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL =", "= value def reset_sequence(self): with self._lock: self._sequence = [] def start(self): self.is_running =", "keras.models import load_model from sklearn.cluster import KMeans import librosa import numpy as np", "self.is_running: with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return", "{}, but ' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice and k-means clustering", "import reweight_distribution 
from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1", "master_volume(self, value): self._audio.volume = value @property def interval(self): return self._interval @interval.setter def interval(self,", "max_index - self.seq_len if min_index < 0: break sequence_slice = self._sequence[min_index:max_index] # Predict", "< self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while True: # Play", "be {}.'.format(num_classes, num_model_classes)) # Prepare voice and k-means clustering if reference_voice is None:", "self._kmeans.fit(reference_voice.points) # Get the classes of the voice sound material / points point_classes", "KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of the voice sound material / points", "signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format(", "\"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample a little", "Slice audio into parts when possible slices = [] if len(onsets) == 0", "mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC", "value def reset_sequence(self): with self._lock: self._sequence = [] def start(self): self.is_running = True", "too long sequences, cut it if necessary penalty = self.seq_len * self.penalty if", "np.max(get_db(frames)))) # Detect onsets in available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db)", "[] self._wavs = [] self._density = 0.0 self.is_running = False # Load model", "How many offsets for max density PLAY_DELAY_EXP = 5 # Exponent for maximum", "the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play", "it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to", "class_duration, self.use_dynamics, self.use_durations) # Add it to our sequence queue self._sequence.append(feature_vector) # Check", "# Play all possible subsequences min_index = max_index - self.seq_len if min_index <", "for max density PLAY_DELAY_EXP = 5 # Exponent for maximum density delay RESET_PROPABILITY", "== idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points))) @property def master_volume(self):", "not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else: slices = slice_audio(frames, onsets,", "len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode it! feature_vector =", "= voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of the voice", "from tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL", "value @property def temperature(self): return self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature", "def master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume = value @property def", "if wav else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound,", "# Play it! 
self._audio.play(wav) # Remove the played sample from our queue self._wavs", "if len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]]", "from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER,", "trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets), len(slices))) # Analyze and", "Project point into given voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means", "import time from keras.models import load_model from sklearn.cluster import KMeans import librosa import", "k-means clustering if reference_voice is None: reference_voice = voice else: voice.fit(reference_voice) self._voice =", "= len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode it! feature_vector", "def tick(self): \"\"\"Main routine for live sessions\"\"\" # Read current frame buffer from", "break # Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted)", "self._audio.play(wav) # Remove the played sample from our queue self._wavs = self._wavs[1:] def", "# Get next wav file to play from queue wav = self._wavs[0] self.ctx.vlog(", "trained with a different ' 'amount of classes: given {}, but ' 'should", "self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of the voice sound material", "when this is silence if self._voice.version == 1 or class_sound != SILENCE_CLASS: #", "silence if self._voice.version == 1 or class_sound != SILENCE_CLASS: # Find closest sound", "# Check .wav queue interval (in seconds) MAX_DENSITY_ONSETS = 10 # How many", "while True: # Play all possible subsequences min_index = max_index - self.seq_len if", "Get the classes of the voice sound material / points point_classes = 
self._kmeans.predict(self._voice.points)", "distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class back into", "Decode class back into sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes,", "encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav queue interval (in seconds) MAX_DENSITY_ONSETS", "Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play,", "action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight", "= encode_duration_class(duration) # Encode it! feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations)", "0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available", "return self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature = value def reset_sequence(self):", "tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS", "True # Start reading audio signal _input self._audio.start() # Start threads self._thread.start() self._play_thread.start()", "sklearn.cluster import KMeans import librosa import numpy as np import tensorflow as tf", "sequence queue self._sequence.append(feature_vector) # Check for too long sequences, cut it if necessary", "self._wavs = [] self._density = 0.0 self.is_running = False # Load model &", "classes of the voice sound material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes =", "= kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = 
kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db =", "MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number of", "AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading", "# Project point into given voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict", "is silence if self._voice.version == 1 or class_sound != SILENCE_CLASS: # Find closest", "= False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def", "class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not do", "queue self._sequence.append(feature_vector) # Check for too long sequences, cut it if necessary penalty", "sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index", "err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon", "for MFCC analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms", "These parameters can be changed during performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature')", "= '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph =", "[] self._density = 0.0 self.is_running = False # Load model & make it", "something if len(self._sequence) < self.seq_len: 
self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while", "of total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes", "input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0} frames", "when able to work with dynamics etc. if self._voice.version == 2: smiley =", "import librosa import numpy as np import tensorflow as tf from tomomibot.audio import", "ctx, voice, model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics =", "Remove the played sample from our queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main", "if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check if we already have", "all possible subsequences min_index = max_index - self.seq_len if min_index < 0: break", "slices generated'.format( len(onsets), len(slices))) # Analyze and categorize slices for y in slices:", "rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into given", "self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio I/O try: self._audio =", "self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False", "self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight the softmax distribution result_reweighted =", "rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! 
self._audio.play(wav) # Remove the", "self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters can", "changed during performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio I/O", "class_duration = encode_duration_class(duration) # Encode it! feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics,", "0.1 # Percentage of chance for resetting sequence class Session(): def __init__(self, ctx,", "self.is_running = False # Load model & make it ready for being used", "{} slices generated'.format( len(onsets), len(slices))) # Analyze and categorize slices for y in", "\"{}\" with {} samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume @master_volume.setter def", "= True # Start reading audio signal _input self._audio.start() # Start threads self._thread.start()", "num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given model was trained with", "RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads self._thread = threading.Thread(target=self.run,", "class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -= 1 # Remove oldest event from", "on amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice", "for idx in range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with", "self._temperature) result_class = np.argmax(result_reweighted) # Decode class back into sub classes class_sound, class_dynamic,", "Prepare voice and k-means clustering if reference_voice is None: reference_voice = voice else:", "or class_sound != SILENCE_CLASS: # Find closest sound to this point wav =", 
"Find closest sound to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) #", "in slices: y_slice = y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate)", "kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters can be changed during performance self._interval", "import threading import time from keras.models import load_model from sklearn.cluster import KMeans import", "point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound = self._kmeans.predict([point])[0] #", "wav: self._wavs.append(wav) max_index -= 1 # Remove oldest event from sequence queue self._sequence", "kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len')", "__init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics", "to do something if len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index =", "onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based on amount", "librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into given voice PCA", "'{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph()", "= [] if len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices = [[frames,", "Encode it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it", "[[frames, 0, 0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected &", "decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not do anything when", "# Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class duration", "if num_model_classes != num_classes: self.ctx.elog('The given model was trained with a different '", "def interval(self): return self._interval @interval.setter def interval(self, value): with self._lock: self._interval = value", "already have enough data to do something if len(self._sequence) < self.seq_len: self.ctx.vlog('') return", "can be changed during performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare", "played sample from our queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine for", "tick(self): \"\"\"Main routine for live sessions\"\"\" # Read current frame buffer from input", "self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs)", "interval(self): return self._interval @interval.setter def interval(self, value): with self._lock: self._interval = value @property", "tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav", "self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty", "= max_index - self.seq_len if min_index < 0: break sequence_slice = self._sequence[min_index:max_index] #", "if wav: self._wavs.append(wav) max_index -= 1 # Remove oldest event from sequence queue", 
"self._sequence = self._sequence[penalty:] # Check if we already have enough data to do", "import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav queue", "(AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train", "# These parameters can be changed during performance self._interval = kwargs.get('interval') self._temperature =", "= True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock = threading.Lock() #", "slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets), len(slices))) #", "from keras.models import load_model from sklearn.cluster import KMeans import librosa import numpy as", "Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode", "& {} slices generated'.format( len(onsets), len(slices))) # Analyze and categorize slices for y", "= voice else: voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get", ">1: Do not do anything when this is silence if self._voice.version == 1", "interval(self, value): with self._lock: self._interval = value @property def temperature(self): return self._temperature @temperature.setter", "self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound,", "= self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound = self._kmeans.predict([point])[0] # Get", "self._interval = value @property def temperature(self): return self._temperature @temperature.setter def temperature(self, value): with", "play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the", "# Add it to our sequence queue self._sequence.append(feature_vector) # Check for too long", "class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do", ".format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume =", "self.seq_len if min_index < 0: break sequence_slice = self._sequence[min_index:max_index] # Predict next action", "# Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project", "Predict k-means class from point class_sound = self._kmeans.predict([point])[0] # Get dynamic class class_dynamic", "y in slices: y_slice = y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice,", "model was trained with a different ' 'amount of classes: given {}, but", "temperature(self, value): with self._lock: self._temperature = value def reset_sequence(self): with self._lock: self._sequence =", "self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) #", "# Analyze and categorize slices for y in slices: y_slice = y[0] #", "= kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters can be changed during performance", "class from point class_sound = self._kmeans.predict([point])[0] # Get dynamic class 
class_dynamic = encode_dynamic_class(class_sound,", "sessions\"\"\" # Read current frame buffer from input signal frames = np.array(self._audio.read_frames()).flatten() if", "frame buffer from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return", "(in seconds) MAX_DENSITY_ONSETS = 10 # How many offsets for max density PLAY_DELAY_EXP", "kwargs.get('temperature') # Prepare audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'),", "max_index -= 1 # Remove oldest event from sequence queue self._sequence = self._sequence[1:]", "# Detect onsets in available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) #", "used in another thread model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model", "value @property def interval(self): return self._interval @interval.setter def interval(self, value): with self._lock: self._interval", "from our queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine for live sessions\"\"\"", "softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class back", "maximum density delay RESET_PROPABILITY = 0.1 # Percentage of chance for resetting sequence", "point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in range(num_classes): indices = np.where(point_classes", "point into given voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class", "self._kmeans.predict([point])[0] # Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class", "' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice and k-means clustering if reference_voice", "self._temperature = value def 
reset_sequence(self): with self._lock: self._sequence = [] def start(self): self.is_running", "_input self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running =", "kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate')", "Check .wav queue interval (in seconds) MAX_DENSITY_ONSETS = 10 # How many offsets", "was trained with a different ' 'amount of classes: given {}, but '", "{}.'.format(num_classes, num_model_classes)) # Prepare voice and k-means clustering if reference_voice is None: reference_voice", "find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav)", "points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in range(num_classes): indices =", "self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number of total classes", "# Prepare voice and k-means clustering if reference_voice is None: reference_voice = voice", "penalty: self._sequence = self._sequence[penalty:] # Check if we already have enough data to", "do anything when this is silence if self._voice.version == 1 or class_sound !=", "self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False def run(self): while self.is_running: time.sleep(self._interval) if", "self._point_classes = [] for idx in range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0])", "voice else: voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the", "but ' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice and 
k-means clustering if", "# Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough", "Load model & make it ready for being used in another thread model_name", "threading import time from keras.models import load_model from sklearn.cluster import KMeans import librosa", "# Version >1: Do not do anything when this is silence if self._voice.version", "buffer from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read", "Session(): def __init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes =", "0, 0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {}", "the voice sound material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for", "= self._wavs[1:] def tick(self): \"\"\"Main routine for live sessions\"\"\" # Read current frame", "self._density)) # Delay playing the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) *", "self._lock = threading.Lock() # Prepare playing logic self._sequence = [] self._wavs = []", "self._wavs[1:] def tick(self): \"\"\"Main routine for live sessions\"\"\" # Read current frame buffer", "file to play from queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample", "> 1: # Get next wav file to play from queue wav =", "reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class back into sub classes class_sound,", "MAX_DENSITY_ONSETS # Slice audio into parts when possible slices = [] if len(onsets)", "# Encode it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add", "return with self._graph.as_default(): max_index = len(self._sequence) while True: # Play all possible subsequences", "@interval.setter def interval(self, value): with self._lock: self._interval = value @property def temperature(self): return", "based on amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS #", "into given voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from", "for resetting sequence class Session(): def __init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx", "len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts when possible slices = []", "classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes:", "onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets), len(slices))) # Analyze", "subsequences min_index = max_index - self.seq_len if min_index < 0: break sequence_slice =", "not self.is_running: return if len(self._wavs) > 1: # Get next wav file to", "= kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These", "from sequence queue self._sequence = self._sequence[1:] if random.random() < RESET_PROPABILITY: self._sequence = []", "Exponent for maximum density delay RESET_PROPABILITY = 0.1 # Percentage of chance for", "samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume", "our sequence queue self._sequence.append(feature_vector) # Check for too long sequences, cut it if", "current frame buffer 
from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0:", "start(self): self.is_running = True # Start reading audio signal _input self._audio.start() # Start", "def stop(self): self._audio.stop() self.is_running = False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running:", "from queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={}, density={})'.format(", "indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points)))", "dynamics etc. if self._voice.version == 2: smiley = '☺' if wav else '☹'", "voice sound material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for idx", "number of total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if", "# Get duration class duration = len(y_slice) / self.samplerate * 1000 class_duration =", "self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts when", "- self.seq_len if min_index < 0: break sequence_slice = self._sequence[min_index:max_index] # Predict next", "{} samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self, value):", "this when able to work with dynamics etc. 
if self._voice.version == 2: smiley", "with a different ' 'amount of classes: given {}, but ' 'should be", "[] def start(self): self.is_running = True # Start reading audio signal _input self._audio.start()", "self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given model was trained with a different", "point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this when able", "model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations", "material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in range(num_classes):", "/ points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in range(num_classes): indices", "if not self.is_running: return if len(self._wavs) > 1: # Get next wav file", "numpy as np import tensorflow as tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets,", "self._sequence = [] def start(self): self.is_running = True # Start reading audio signal", "class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to our sequence queue self._sequence.append(feature_vector) #", "def start(self): self.is_running = True # Start reading audio signal _input self._audio.start() #", "is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else: slices = slice_audio(frames, onsets, trim=False)", "self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in range(num_classes): indices = np.where(point_classes == idx)", "decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav queue interval (in seconds) MAX_DENSITY_ONSETS =", "self._lock: self._interval = value @property def temperature(self): return self._temperature @temperature.setter def temperature(self, value):", "live sessions\"\"\" # Read current frame buffer 
from input signal frames = np.array(self._audio.read_frames()).flatten()", "* self._density time.sleep(rdm) # Play it! self._audio.play(wav) # Remove the played sample from", "threading.Lock() # Prepare playing logic self._sequence = [] self._wavs = [] self._density =", "(class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -=", "a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! self._audio.play(wav)", "for too long sequences, cut it if necessary penalty = self.seq_len * self.penalty", "# Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) #", "value): with self._lock: self._interval = value @property def temperature(self): return self._temperature @temperature.setter def", "Get next wav file to play from queue wav = self._wavs[0] self.ctx.vlog( '▶", "self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC analysis') else: #", "oldest event from sequence queue self._sequence = self._sequence[1:] if random.random() < RESET_PROPABILITY: self._sequence", "different ' 'amount of classes: given {}, but ' 'should be {}.'.format(num_classes, num_model_classes))", "sample data for MFCC analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) /", "onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts", "# Only show this when able to work with dynamics etc. 
if self._voice.version", "else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() #", "' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -= 1", "smiley = '☺' if wav else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={},", "@property def master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume = value @property", "given voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from point", "model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number of total", "clustering if reference_voice is None: reference_voice = voice else: voice.fit(reference_voice) self._voice = voice", "num_classes: self.ctx.elog('The given model was trained with a different ' 'amount of classes:", "self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if", "self.ctx.vlog( 'Not enough sample data for MFCC analysis') else: # Calculate RMS rms_data", "self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check if", "density PLAY_DELAY_EXP = 5 # Exponent for maximum density delay RESET_PROPABILITY = 0.1", "with self._lock: self._temperature = value def reset_sequence(self): with self._lock: self._sequence = [] def", "= '☺' if wav else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format(", "@temperature.setter def temperature(self, value): with self._lock: self._temperature = value def reset_sequence(self): with self._lock:", "{0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available data onsets, _", "enough data to do something if 
len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default():", "it ready for being used in another thread model_name = '{}.h5'.format(model) model_path =", ".wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample", "0.0 self.is_running = False # Load model & make it ready for being", "class_sound, class_dynamic, class_duration) # Only show this when able to work with dynamics", "Remove oldest event from sequence queue self._sequence = self._sequence[1:] if random.random() < RESET_PROPABILITY:", "back into sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations)", "else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format(", "Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point", "and categorize slices for y in slices: y_slice = y[0] # Calculate MFCCs", "Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False def run(self):", "class Session(): def __init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes", "data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based on", "PLAY_DELAY_EXP = 5 # Exponent for maximum density delay RESET_PROPABILITY = 0.1 #", "self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature = value def reset_sequence(self): with", "= random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! 
self._audio.play(wav) # Remove the played", "== 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in", "make it ready for being used in another thread model_name = '{}.h5'.format(model) model_path", "being used in another thread model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name)", "RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into", "space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound = self._kmeans.predict([point])[0]", "it! self._audio.play(wav) # Remove the played sample from our queue self._wavs = self._wavs[1:]", "RESET_PROPABILITY = 0.1 # Percentage of chance for resetting sequence class Session(): def", "self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine for live sessions\"\"\" # Read current", "to play from queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\"", "the classes of the voice sound material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes", "1: # Get next wav file to play from queue wav = self._wavs[0]", "frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available data onsets, _ =", "as err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=())", "volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads self._thread", "sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it!", "Check for too long sequences, cut it if necessary penalty = self.seq_len *", "channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: 
self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent", "encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to our sequence queue", "this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this when", "* self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check if we", "class_sound = self._kmeans.predict([point])[0] # Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get", "class_duration)) if wav: self._wavs.append(wav) max_index -= 1 # Remove oldest event from sequence", "os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample a little bit rdm =", "as tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const", "False # Load model & make it ready for being used in another", "data for MFCC analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max", "offsets for max density PLAY_DELAY_EXP = 5 # Exponent for maximum density delay", "it to our sequence queue self._sequence.append(feature_vector) # Check for too long sequences, cut", "= tf.get_default_graph() # Calculate number of total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations)", "num_model_classes != num_classes: self.ctx.elog('The given model was trained with a different ' 'amount", "class back into sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics,", "load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number of total classes num_classes =", "Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class duration =", "self._thread.daemon = True 
self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock = threading.Lock()", "len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while True: #", "# Predict k-means class from point class_sound = self._kmeans.predict([point])[0] # Get dynamic class", "encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav queue interval (in", "idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return", "import KMeans import librosa import numpy as np import tensorflow as tf from", "# Prepare audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'),", "0 and not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else: slices =", "self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db", "density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample a little bit rdm", "if np.sum(result) == 0: break # Reweight the softmax distribution result_reweighted = reweight_distribution(result,", "wav else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic,", "of classes: given {}, but ' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice", "result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not do anything when this", "= value @property def interval(self): return self._interval @interval.setter def interval(self, value): with 
self._lock:", "random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! self._audio.play(wav) # Remove the played sample", "self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock = threading.Lock() # Prepare playing", "PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound =", "class_dynamic, class_duration) # Only show this when able to work with dynamics etc.", "next action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break #", "with dynamics etc. if self._voice.version == 2: smiley = '☺' if wav else", "channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads", "min_index < 0: break sequence_slice = self._sequence[min_index:max_index] # Predict next action via model", "= 5 # Exponent for maximum density delay RESET_PROPABILITY = 0.1 # Percentage", "Set a density based on amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets))", "device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..') #", "result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight the softmax distribution", "self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) >", "'amount of classes: given {}, but ' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare", "kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate,", "len(voice.points))) @property def master_volume(self): return 
self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume = value", "for being used in another thread model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER,", "= self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density))", "args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock =", "< 0: break sequence_slice = self._sequence[min_index:max_index] # Predict next action via model result", "wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this when able to", "== 0: break # Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class", "self.use_durations) # Add it to our sequence queue self._sequence.append(feature_vector) # Check for too", "voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of the voice sound", "dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class duration = len(y_slice)", "while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL)", "parts when possible slices = [] if len(onsets) == 0 and not is_silent(frames,", "except RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC analysis') else: # Calculate", "# Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False def", "self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound = self._kmeans.predict([point])[0] # Get dynamic", "total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, 
self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes !=", "self._sequence = [] self._wavs = [] self._density = 0.0 self.is_running = False #", "signal _input self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running", "# Remove the played sample from our queue self._wavs = self._wavs[1:] def tick(self):", "self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav:", "= self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in range(num_classes): indices = np.where(point_classes ==", "class duration = len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode", "tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import", "self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while True: # Play all", "threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon", "self._lock: self._temperature = value def reset_sequence(self): with self._lock: self._sequence = [] def start(self):", "= librosa.feature.rms(y=y_slice) / self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into given voice", "[] if len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices = [[frames, 0,", "have enough data to do something if len(self._sequence) < self.seq_len: self.ctx.vlog('') return with", "= threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock = threading.Lock() # Prepare playing logic", "if necessary penalty = self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence =", "thread 
model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function()", "wav file to play from queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav", "= value @property def temperature(self): return self._temperature @temperature.setter def temperature(self, value): with self._lock:", "tensorflow as tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from", "import tensorflow as tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db)", "model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight the softmax", "class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not", "= slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets), len(slices)))", "with {} samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self,", "self.is_running: return if len(self._wavs) > 1: # Get next wav file to play", "!= SILENCE_CLASS: # Find closest sound to this point wav = self._voice.find_wav(self._point_classes, class_sound,", "threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock", "threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock = threading.Lock() # Prepare playing logic self._sequence", "audio signal _input self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop()", "np.argmax(result_reweighted) # Decode class back into sub classes class_sound, 
class_dynamic, class_duration = decode_classes(", "bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! self._audio.play(wav) # Remove", "self._interval @interval.setter def interval(self, value): with self._lock: self._interval = value @property def temperature(self):", "self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume", "@master_volume.setter def master_volume(self, value): self._audio.volume = value @property def interval(self): return self._interval @interval.setter", "we already have enough data to do something if len(self._sequence) < self.seq_len: self.ctx.vlog('')", "sequence_slice = self._sequence[min_index:max_index] # Predict next action via model result = self._model.predict(np.array([sequence_slice])) if", "from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes,", "= ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty =", "def master_volume(self, value): self._audio.volume = value @property def interval(self): return self._interval @interval.setter def", "do something if len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence)", "def __init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes')", "and k-means clustering if reference_voice is None: reference_voice = voice else: voice.fit(reference_voice) self._voice", "= encode_dynamic_class(class_sound, rms) # Get duration class duration = len(y_slice) / self.samplerate *", "class_sound != SILENCE_CLASS: # Find closest sound to this point wav = self._voice.find_wav(self._point_classes,", "import 
load_model from sklearn.cluster import KMeans import librosa import numpy as np import", "time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not", "and not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else: slices = slice_audio(frames,", "is None: reference_voice = voice else: voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes)", "categorize slices for y in slices: y_slice = y[0] # Calculate MFCCs try:", "len(onsets), len(slices))) # Analyze and categorize slices for y in slices: y_slice =", "model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph", "our queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine for live sessions\"\"\" #", "/ self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into given voice PCA space", "len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check if we already have enough", "self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) > 1: # Get next", "@property def temperature(self): return self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature =", "penalty = self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] #", "# Start reading audio signal _input self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n')", "via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight the", "cut it if necessary penalty = self.seq_len * self.penalty if len(self._sequence) > penalty:", "Do not do anything when this is silence if self._voice.version == 1 or", 
"Detect onsets in available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set", "== 1 or class_sound != SILENCE_CLASS: # Find closest sound to this point", "if len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while True:", "self._wavs.append(wav) max_index -= 1 # Remove oldest event from sequence queue self._sequence =", "Version >1: Do not do anything when this is silence if self._voice.version ==", "Delay playing the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm)", "np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) #", "slices: y_slice = y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except", "if len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect", "# Load model & make it ready for being used in another thread", "len(slices))) # Analyze and categorize slices for y in slices: y_slice = y[0]", "stop(self): self._audio.stop() self.is_running = False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with", "samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..')", "self.samplerate, self.threshold_db) # Set a density based on amount of onsets self._density =", "<gh_stars>10-100 import os import random import threading import time from keras.models import load_model", "np import tensorflow as tf from tomomibot.audio import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features,", "= 0.1 # Check .wav queue interval (in 
seconds) MAX_DENSITY_ONSETS = 10 #", "& make it ready for being used in another thread model_name = '{}.h5'.format(model)", "True: # Play all possible subsequences min_index = max_index - self.seq_len if min_index", "voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of", "True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True self._lock = threading.Lock() # Prepare", "possible subsequences min_index = max_index - self.seq_len if min_index < 0: break sequence_slice", "in available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density", "len(self._wavs), self._density)) # Delay playing the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP)", "is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from", "MFCC analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice) / self._voice.rms_max rms =", "Read current frame buffer from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) ==", "sample from our queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine for live", "model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() #", "return if len(self._wavs) > 1: # Get next wav file to play from", "'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice and k-means clustering if reference_voice is", "import random import threading import time from keras.models import load_model from sklearn.cluster import", "= True self._lock = threading.Lock() # Prepare playing logic self._sequence = [] self._wavs", "# Check if we already have enough data to do something if len(self._sequence)", "= [] def 
start(self): self.is_running = True # Start reading audio signal _input", "self._density time.sleep(rdm) # Play it! self._audio.play(wav) # Remove the played sample from our", "during performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio I/O try:", "queue wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav),", "for live sessions\"\"\" # Read current frame buffer from input signal frames =", "MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts when possible slices =", "Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample", "work with dynamics etc. if self._voice.version == 2: smiley = '☺' if wav", "interval (in seconds) MAX_DENSITY_ONSETS = 10 # How many offsets for max density", "import (AudioIO, slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from", "# Get the classes of the voice sound material / points point_classes =", "onsets detected & {} slices generated'.format( len(onsets), len(slices))) # Analyze and categorize slices", "# Read current frame buffer from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames)", "except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads self._thread =", "tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class,", "return self._interval @interval.setter def interval(self, value): with self._lock: self._interval = value @property def", "try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data for", "= threading.Lock() # Prepare playing logic self._sequence = [] self._wavs = [] self._density", 
"self.is_running = True # Start reading audio signal _input self._audio.start() # Start threads", "able to work with dynamics etc. if self._voice.version == 2: smiley = '☺'", "of the voice sound material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = []", "= self._sequence[penalty:] # Check if we already have enough data to do something", "= len(self._sequence) while True: # Play all possible subsequences min_index = max_index -", "if self.is_running: with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running:", "many offsets for max density PLAY_DELAY_EXP = 5 # Exponent for maximum density", "np.sum(result) == 0: break # Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature)", "_ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based on amount of", "concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play, args=())", "queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine for live sessions\"\"\" # Read", "self._sequence[min_index:max_index] # Predict next action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) ==", "given {}, but ' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice and k-means", "mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC analysis') else:", "master_volume(self): return self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume = value @property def interval(self):", "= np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames))))", "> penalty: self._sequence = self._sequence[penalty:] # Check if we 
already have enough data", "self._voice.version == 2: smiley = '☺' if wav else '☹' self.ctx.vlog('{} find sound", "model & make it ready for being used in another thread model_name =", "def temperature(self, value): with self._lock: self._temperature = value def reset_sequence(self): with self._lock: self._sequence", "duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -= 1 # Remove", "generated'.format( len(onsets), len(slices))) # Analyze and categorize slices for y in slices: y_slice", "max_index = len(self._sequence) while True: # Play all possible subsequences min_index = max_index", "..') # Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread", "value): self._audio.volume = value @property def interval(self): return self._interval @interval.setter def interval(self, value):", "= KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of the voice sound material /", "Percentage of chance for resetting sequence class Session(): def __init__(self, ctx, voice, model,", "sequence class Session(): def __init__(self, ctx, voice, model, reference_voice=None, **kwargs): self.ctx = ctx", "= 0.1 # Percentage of chance for resetting sequence class Session(): def __init__(self,", "(volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available data onsets, _ = detect_onsets(frames,", "0: break sequence_slice = self._sequence[min_index:max_index] # Predict next action via model result =", "self.use_dynamics, self.use_durations) # Version >1: Do not do anything when this is silence", "'Not enough sample data for MFCC analysis') else: # Calculate RMS rms_data =", "with self._lock: self.tick() def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if", "show this when able to work with dynamics etc. 
if self._voice.version == 2:", "self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False def run(self): while self.is_running:", "temperature(self): return self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature = value def", "from sklearn.cluster import KMeans import librosa import numpy as np import tensorflow as", "/ MAX_DENSITY_ONSETS # Slice audio into parts when possible slices = [] if", "feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to our", "with self._graph.as_default(): max_index = len(self._sequence) while True: # Play all possible subsequences min_index", "= kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate =", "# Slice audio into parts when possible slices = [] if len(onsets) ==", "= decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1: Do not do anything", "import numpy as np import tensorflow as tf from tomomibot.audio import (AudioIO, slice_audio,", "tf.get_default_graph() # Calculate number of total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes", "if self._voice.version == 1 or class_sound != SILENCE_CLASS: # Find closest sound to", "= kwargs.get('temperature') # Prepare audio I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'),", "1 or class_sound != SILENCE_CLASS: # Find closest sound to this point wav", "data to do something if len(self._sequence) < self.seq_len: self.ctx.vlog('') return with self._graph.as_default(): max_index", "not do anything when this is silence if self._voice.version == 
1 or class_sound", "next wav file to play from queue wav = self._wavs[0] self.ctx.vlog( '▶ play", "closest sound to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only", "class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to our sequence queue self._sequence.append(feature_vector)", "0: break # Reweight the softmax distribution result_reweighted = reweight_distribution(result, self._temperature) result_class =", "self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self): self._audio.stop() self.is_running = False def run(self): while self.is_running: time.sleep(self._interval)", "self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate number of total classes num_classes = get_num_classes(self.num_sound_classes,", "os import random import threading import time from keras.models import load_model from sklearn.cluster", "sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample a", "(queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing the sample a little bit", "y_slice = y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError:", "self._voice.rms_max rms = np.float32(np.max(rms_data)).item() # Project point into given voice PCA space point", "= encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) # Add it to our sequence", "sequences, cut it if necessary penalty = self.seq_len * self.penalty if len(self._sequence) >", "long sequences, cut it if necessary penalty = self.seq_len * self.penalty if len(self._sequence)", "'☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if", "class_sound, class_dynamic, class_duration)) if 
wav: self._wavs.append(wav) max_index -= 1 # Remove oldest event", "self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show this when able to work with", "= self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0: break # Reweight the softmax distribution result_reweighted", "Calculate number of total classes num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1]", "= 10 # How many offsets for max density PLAY_DELAY_EXP = 5 #", "self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets in available data onsets,", "duration class duration = len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration) #", "= [] self._density = 0.0 self.is_running = False # Load model & make", "# Exponent for maximum density delay RESET_PROPABILITY = 0.1 # Percentage of chance", "play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) > 1: #", "class_duration) # Only show this when able to work with dynamics etc. 
if", "time from keras.models import load_model from sklearn.cluster import KMeans import librosa import numpy", "sound material / points point_classes = self._kmeans.predict(self._voice.points) self._point_classes = [] for idx in", "self._density = 0.0 self.is_running = False # Load model & make it ready", "anything when this is silence if self._voice.version == 1 or class_sound != SILENCE_CLASS:", "event from sequence queue self._sequence = self._sequence[1:] if random.random() < RESET_PROPABILITY: self._sequence =", "= self._sequence[min_index:max_index] # Predict next action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result)", "point class_sound = self._kmeans.predict([point])[0] # Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) #", "performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio I/O try: self._audio", "== 2: smiley = '☺' if wav else '☹' self.ctx.vlog('{} find sound (class={},", "== 0 and not is_silent(frames, self.threshold_db): slices = [[frames, 0, 0]] else: slices", "= detect_onsets(frames, self.samplerate, self.threshold_db) # Set a density based on amount of onsets", "a different ' 'amount of classes: given {}, but ' 'should be {}.'.format(num_classes,", "run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self): while self.is_running:", "for maximum density delay RESET_PROPABILITY = 0.1 # Percentage of chance for resetting", "self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets), len(slices))) # Analyze and categorize", "rms = np.float32(np.max(rms_data)).item() # Project point into given voice PCA space point =", "for y in slices: y_slice = y[0] # Calculate MFCCs try: mfcc =", "librosa import numpy as np import tensorflow as tf from tomomibot.audio import (AudioIO,", "self._graph = tf.get_default_graph() # Calculate number of total classes 
num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics,", "' 'amount of classes: given {}, but ' 'should be {}.'.format(num_classes, num_model_classes)) #", "classes: given {}, but ' 'should be {}.'.format(num_classes, num_model_classes)) # Prepare voice and", "playing the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) #", "detected & {} slices generated'.format( len(onsets), len(slices))) # Analyze and categorize slices for", "delay RESET_PROPABILITY = 0.1 # Percentage of chance for resetting sequence class Session():", "necessary penalty = self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:]", "# Check for too long sequences, cut it if necessary penalty = self.seq_len", "def reset_sequence(self): with self._lock: self._sequence = [] def start(self): self.is_running = True #", "reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 #", "# Prepare playing logic self._sequence = [] self._wavs = [] self._density = 0.0", "time.sleep(rdm) # Play it! 
self._audio.play(wav) # Remove the played sample from our queue", "class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class duration = len(y_slice) /", "queue interval (in seconds) MAX_DENSITY_ONSETS = 10 # How many offsets for max", "'☺' if wav else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley,", "self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate", "# How many offsets for max density PLAY_DELAY_EXP = 5 # Exponent for", "Check if we already have enough data to do something if len(self._sequence) <", "rms) # Get duration class duration = len(y_slice) / self.samplerate * 1000 class_duration", "of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into", "# Predict next action via model result = self._model.predict(np.array([sequence_slice])) if np.sum(result) == 0:", "classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version >1:", "amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio", "= [[frames, 0, 0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected", "@property def interval(self): return self._interval @interval.setter def interval(self, value): with self._lock: self._interval =", "in another thread model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model =", "the played sample from our queue self._wavs = self._wavs[1:] def tick(self): \"\"\"Main routine", "= 0.0 self.is_running = False # Load model & make it ready for", "= kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters can be", 
"get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given model", "mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils", "self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay", "another thread model_name = '{}.h5'.format(model) model_path = os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path)", "self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume = value @property def interval(self): return self._interval", "parameters can be changed during performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') #", "-= 1 # Remove oldest event from sequence queue self._sequence = self._sequence[1:] if", "[] for idx in range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\"", "class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration class duration = len(y_slice) / self.samplerate", "into sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) #", "slice_audio, detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import", "encode_duration_class(duration) # Encode it! 
feature_vector = encode_feature_vector(self.num_sound_classes, class_sound, class_dynamic, class_duration, self.use_dynamics, self.use_durations) #", "when possible slices = [] if len(onsets) == 0 and not is_silent(frames, self.threshold_db):", "device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare", "self.ctx.vlog('') return with self._graph.as_default(): max_index = len(self._sequence) while True: # Play all possible", "self._audio.stop() self.is_running = False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock:", "self._lock: self._sequence = [] def start(self): self.is_running = True # Start reading audio", "while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) > 1: # Get", "(get_num_classes, encode_duration_class, encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav queue interval", "self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') #", "max density PLAY_DELAY_EXP = 5 # Exponent for maximum density delay RESET_PROPABILITY =", "SILENCE_CLASS: # Find closest sound to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic,", "encode_dynamic_class(class_sound, rms) # Get duration class duration = len(y_slice) / self.samplerate * 1000", "kwargs.get('threshold') # These parameters can be changed during performance self._interval = kwargs.get('interval') self._temperature", "= reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class back into sub classes", "reading audio signal _input self._audio.start() # Start threads self._thread.start() 
self._play_thread.start() self.ctx.log('Ready!\\n') def stop(self):", "self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given model was trained", "kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold')", "= self._kmeans.predict([point])[0] # Get dynamic class class_dynamic = encode_dynamic_class(class_sound, rms) # Get duration", "np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points))) @property def", "Add it to our sequence queue self._sequence.append(feature_vector) # Check for too long sequences,", "density delay RESET_PROPABILITY = 0.1 # Percentage of chance for resetting sequence class", "y[0] # Calculate MFCCs try: mfcc = mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not", "= get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given", "= mfcc_features(y_slice, self.samplerate) except RuntimeError: self.ctx.vlog( 'Not enough sample data for MFCC analysis')", "import os import random import threading import time from keras.models import load_model from", "seconds) MAX_DENSITY_ONSETS = 10 # How many offsets for max density PLAY_DELAY_EXP =", "I/O try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError", "def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick() def play(self): while", "= False # Load model & make it 
ready for being used in", "result_reweighted = reweight_distribution(result, self._temperature) result_class = np.argmax(result_reweighted) # Decode class back into sub", "audio into parts when possible slices = [] if len(onsets) == 0 and", "sound to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration) # Only show", "detect_onsets, is_silent, mfcc_features, get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution", "reference_voice = voice else: voice.fit(reference_voice) self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) #", "# Remove oldest event from sequence queue self._sequence = self._sequence[1:] if random.random() <", "if reference_voice is None: reference_voice = voice else: voice.fit(reference_voice) self._voice = voice self._kmeans", "len(frames) == 0: return self.ctx.vlog('Read {0} frames (volume={1:.2f}dB)'.format( len(frames), np.max(get_db(frames)))) # Detect onsets", "self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err:", "self.use_dynamics, self.use_durations) # Add it to our sequence queue self._sequence.append(feature_vector) # Check for", "reset_sequence(self): with self._lock: self._sequence = [] def start(self): self.is_running = True # Start", "of chance for resetting sequence class Session(): def __init__(self, ctx, voice, model, reference_voice=None,", "= kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio I/O try: self._audio = AudioIO(ctx,", "= threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread = threading.Thread(target=self.play, args=()) self._play_thread.daemon = True", 
"self._audio.volume = value @property def interval(self): return self._interval @interval.setter def interval(self, value): with", "playing logic self._sequence = [] self._wavs = [] self._density = 0.0 self.is_running =", "self.is_running = False def run(self): while self.is_running: time.sleep(self._interval) if self.is_running: with self._lock: self.tick()", "if we already have enough data to do something if len(self._sequence) < self.seq_len:", "self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The given model was", "self.threshold_db = kwargs.get('threshold') # These parameters can be changed during performance self._interval =", "self._voice = voice self._kmeans = KMeans(n_clusters=self.num_sound_classes) self._kmeans.fit(reference_voice.points) # Get the classes of the", "encode_dynamic_class, encode_feature_vector, decode_classes) CHECK_WAV_INTERVAL = 0.1 # Check .wav queue interval (in seconds)", "break sequence_slice = self._sequence[min_index:max_index] # Predict next action via model result = self._model.predict(np.array([sequence_slice]))", "self.use_durations) # Version >1: Do not do anything when this is silence if", "self._graph.as_default(): max_index = len(self._sequence) while True: # Play all possible subsequences min_index =", "= AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as err: self.ctx.elog(err)", "random import threading import time from keras.models import load_model from sklearn.cluster import KMeans", "Play it! 
self._audio.play(wav) # Remove the played sample from our queue self._wavs =", "= [] for idx in range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice", "smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -= 1 # Remove oldest", "def temperature(self): return self._temperature @temperature.setter def temperature(self, value): with self._lock: self._temperature = value", "'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration)) if wav: self._wavs.append(wav) max_index -= 1 #", "get_db) from tomomibot.const import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils import", "len(self._sequence) while True: # Play all possible subsequences min_index = max_index - self.seq_len", "= kwargs.get('threshold') # These parameters can be changed during performance self._interval = kwargs.get('interval')", "voice PCA space point = self._voice.project([mfcc])[0].flatten() # Predict k-means class from point class_sound", "self.ctx.elog(err) self.ctx.log('Loading ..') # Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon =", "sequence queue self._sequence = self._sequence[1:] if random.random() < RESET_PROPABILITY: self._sequence = [] self.ctx.vlog('')", "result_class = np.argmax(result_reweighted) # Decode class back into sub classes class_sound, class_dynamic, class_duration", "# Delay playing the sample a little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density", "onsets in available data onsets, _ = detect_onsets(frames, self.samplerate, self.threshold_db) # Set a", "def play(self): while self.is_running: time.sleep(CHECK_WAV_INTERVAL) if not self.is_running: return if len(self._wavs) > 1:", "from input signal frames = np.array(self._audio.read_frames()).flatten() if len(frames) == 0: return self.ctx.vlog('Read {0}", 
"len(self._wavs) > 1: # Get next wav file to play from queue wav", "**kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations')", "def interval(self, value): with self._lock: self._interval = value @property def temperature(self): return self._temperature", "kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters", "try: self._audio = AudioIO(ctx, samplerate=self.samplerate, device_in=kwargs.get('input_device'), device_out=kwargs.get('output_device'), channel_in=kwargs.get('input_channel'), channel_out=kwargs.get('output_channel'), volume=kwargs.get('volume')) except RuntimeError as", "slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets detected & {} slices generated'.format( len(onsets),", "\"\"\"Main routine for live sessions\"\"\" # Read current frame buffer from input signal", "num_model_classes)) # Prepare voice and k-means clustering if reference_voice is None: reference_voice =", "reference_voice is None: reference_voice = voice else: voice.fit(reference_voice) self._voice = voice self._kmeans =", "return self._audio.volume @master_volume.setter def master_volume(self, value): self._audio.volume = value @property def interval(self): return", "possible slices = [] if len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices", "little bit rdm = random.expovariate(PLAY_DELAY_EXP) * self._density time.sleep(rdm) # Play it! 
self._audio.play(wav) #", "# Prepare concurrent threads self._thread = threading.Thread(target=self.run, args=()) self._thread.daemon = True self._play_thread =", "routine for live sessions\"\"\" # Read current frame buffer from input signal frames", "= np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points))) @property", "import MODELS_FOLDER, SILENCE_CLASS from tomomibot.train import reweight_distribution from tomomibot.utils import (get_num_classes, encode_duration_class, encode_dynamic_class,", ".wav queue interval (in seconds) MAX_DENSITY_ONSETS = 10 # How many offsets for", "voice, model, reference_voice=None, **kwargs): self.ctx = ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics')", "Prepare playing logic self._sequence = [] self._wavs = [] self._density = 0.0 self.is_running", "= [] self._wavs = [] self._density = 0.0 self.is_running = False # Load", "num_classes = get_num_classes(self.num_sound_classes, self.use_dynamics, self.use_durations) num_model_classes = self._model.layers[-1].output_shape[1] if num_model_classes != num_classes: self.ctx.elog('The", "if self._voice.version == 2: smiley = '☺' if wav else '☹' self.ctx.vlog('{} find", "Get duration class duration = len(y_slice) / self.samplerate * 1000 class_duration = encode_duration_class(duration)", "Start reading audio signal _input self._audio.start() # Start threads self._thread.start() self._play_thread.start() self.ctx.log('Ready!\\n') def", "with self._lock: self._interval = value @property def temperature(self): return self._temperature @temperature.setter def temperature(self,", "self.threshold_db): slices = [[frames, 0, 0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{}", "be changed during performance self._interval = kwargs.get('interval') self._temperature = kwargs.get('temperature') # Prepare audio", "= 
kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty') self.samplerate = kwargs.get('samplerate') self.seq_len =", "= np.argmax(result_reweighted) # Decode class back into sub classes class_sound, class_dynamic, class_duration =", "self.seq_len = kwargs.get('seq_len') self.threshold_db = kwargs.get('threshold') # These parameters can be changed during", "this is silence if self._voice.version == 1 or class_sound != SILENCE_CLASS: # Find", "= os.path.join(os.getcwd(), MODELS_FOLDER, model_name) self._model = load_model(model_path) self._model._make_predict_function() self._graph = tf.get_default_graph() # Calculate", "self.ctx.log('Voice \"{}\" with {} samples' .format(voice.name, len(voice.points))) @property def master_volume(self): return self._audio.volume @master_volume.setter", "ctx self.num_sound_classes = kwargs.get('num_classes') self.use_dynamics = kwargs.get('dynamics') self.use_durations = kwargs.get('durations') self.penalty = kwargs.get('penalty')", "if min_index < 0: break sequence_slice = self._sequence[min_index:max_index] # Predict next action via", "given model was trained with a different ' 'amount of classes: given {},", "logic self._sequence = [] self._wavs = [] self._density = 0.0 self.is_running = False", "idx in range(num_classes): indices = np.where(point_classes == idx) self._point_classes.append(indices[0]) self.ctx.log('Voice \"{}\" with {}", "into parts when possible slices = [] if len(onsets) == 0 and not", "# Find closest sound to this point wav = self._voice.find_wav(self._point_classes, class_sound, class_dynamic, class_duration)", "self.penalty if len(self._sequence) > penalty: self._sequence = self._sequence[penalty:] # Check if we already", "else '☹' self.ctx.vlog('{} find sound (class={}, ' 'dynamic={}, duration={})'.format( smiley, class_sound, class_dynamic, class_duration))", "a density based on amount of onsets self._density = min( MAX_DENSITY_ONSETS, len(onsets)) /", "slices 
= [] if len(onsets) == 0 and not is_silent(frames, self.threshold_db): slices =", "k-means class from point class_sound = self._kmeans.predict([point])[0] # Get dynamic class class_dynamic =", "if len(self._wavs) > 1: # Get next wav file to play from queue", "enough sample data for MFCC analysis') else: # Calculate RMS rms_data = librosa.feature.rms(y=y_slice)", "wav = self._wavs[0] self.ctx.vlog( '▶ play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs),", "Analyze and categorize slices for y in slices: y_slice = y[0] # Calculate", "it if necessary penalty = self.seq_len * self.penalty if len(self._sequence) > penalty: self._sequence", "with self._lock: self._sequence = [] def start(self): self.is_running = True # Start reading", "self._sequence[penalty:] # Check if we already have enough data to do something if", "'▶ play .wav sample \"{}\" (queue={}, density={})'.format( os.path.basename(wav), len(self._wavs), self._density)) # Delay playing", "# Set a density based on amount of onsets self._density = min( MAX_DENSITY_ONSETS,", "slices = [[frames, 0, 0]] else: slices = slice_audio(frames, onsets, trim=False) self.ctx.vlog('{} onsets", "!= num_classes: self.ctx.elog('The given model was trained with a different ' 'amount of", "value): with self._lock: self._temperature = value def reset_sequence(self): with self._lock: self._sequence = []", "self.threshold_db) # Set a density based on amount of onsets self._density = min(", "= min( MAX_DENSITY_ONSETS, len(onsets)) / MAX_DENSITY_ONSETS # Slice audio into parts when possible", "sub classes class_sound, class_dynamic, class_duration = decode_classes( result_class, self.num_sound_classes, self.use_dynamics, self.use_durations) # Version", "args=()) self._play_thread.daemon = True self._lock = threading.Lock() # Prepare playing logic self._sequence =", "min_index = max_index - self.seq_len if min_index < 0: break sequence_slice = self._sequence[min_index:max_index]", "/ 
self.samplerate * 1000 class_duration = encode_duration_class(duration) # Encode it! feature_vector = encode_feature_vector(self.num_sound_classes," ]
[ "the input matches a password of one's own choosing, and # prints 'Begone!'", "# prints 'You may enter.' if the input matches a password of one's", "that takes in an input, # prints 'You may enter.' if the input", "enter.' if the input matches a password of one's own choosing, and #", "program that takes in an input, # prints 'You may enter.' if the", "an input, # prints 'You may enter.' if the input matches a password", "input, # prints 'You may enter.' if the input matches a password of", "if the input matches a password of one's own choosing, and # prints", "input matches a password of one's own choosing, and # prints 'Begone!' otherwise.", "in an input, # prints 'You may enter.' if the input matches a", "prints 'You may enter.' if the input matches a password of one's own", "may enter.' if the input matches a password of one's own choosing, and", "# Write a program that takes in an input, # prints 'You may", "takes in an input, # prints 'You may enter.' if the input matches", "'You may enter.' if the input matches a password of one's own choosing,", "a program that takes in an input, # prints 'You may enter.' if", "Write a program that takes in an input, # prints 'You may enter.'" ]
[ "robot action_queue = [ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9,", "action_queue # Main loop, perform simulation steps until Webots is stopping the controller", "perform simulation steps until Webots is stopping the controller while robot.step(timestep) != -1:", "Actions for our robot action_queue = [ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9,", "0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0])", "] robot.action_queue = action_queue # Main loop, perform simulation steps until Webots is", "(\"move\", [0, 0]) ] robot.action_queue = action_queue # Main loop, perform simulation steps", "timestep = int(robot.getBasicTimeStep()) # Actions for our robot action_queue = [ (\"move\", [0,", "# Actions for our robot action_queue = [ (\"move\", [0, 0]), \"scan\", (\"move\",", "# Main loop, perform simulation steps until Webots is stopping the controller while", "[0, 0]) ] robot.action_queue = action_queue # Main loop, perform simulation steps until", "Main loop, perform simulation steps until Webots is stopping the controller while robot.step(timestep)", "0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue = action_queue # Main", "script for drive_to_pos def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions for", "\"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]),", "(\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ]", "simulation steps until Webots is stopping the controller while robot.step(timestep) != -1: robot.execute_next_action()", "-0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue =", "0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9,", "(\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 
0.9]),", "loop, perform simulation steps until Webots is stopping the controller while robot.step(timestep) !=", "= [ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\",", "for drive_to_pos def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions for our", "action_queue = [ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]),", "Test script for drive_to_pos def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions", "def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions for our robot action_queue", "(\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue = action_queue", "-0.9]), (\"move\", [0, 0]) ] robot.action_queue = action_queue # Main loop, perform simulation", "[0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue", "(\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\",", "robot.action_queue = action_queue # Main loop, perform simulation steps until Webots is stopping", "[0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\",", "[-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue = action_queue # Main loop, perform", "Setup timestep = int(robot.getBasicTimeStep()) # Actions for our robot action_queue = [ (\"move\",", "our robot action_queue = [ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\",", "[0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0,", "for our robot action_queue = [ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]),", "[-0.9, 0.9]), (\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue = action_queue #", "= action_queue # Main loop, perform simulation steps until Webots is stopping the", "# Setup timestep = int(robot.getBasicTimeStep()) # Actions for 
our robot action_queue = [", "# Test script for drive_to_pos def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) #", "[ (\"move\", [0, 0]), \"scan\", (\"move\", [0.9, 0.9]), (\"move\", [0.9, -0.9]), (\"move\", [-0.9,", "(\"move\", [-0.9, -0.9]), (\"move\", [0, 0]) ] robot.action_queue = action_queue # Main loop,", "main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions for our robot action_queue =", "drive_to_pos def main(robot): # Setup timestep = int(robot.getBasicTimeStep()) # Actions for our robot", "0]) ] robot.action_queue = action_queue # Main loop, perform simulation steps until Webots", "= int(robot.getBasicTimeStep()) # Actions for our robot action_queue = [ (\"move\", [0, 0]),", "int(robot.getBasicTimeStep()) # Actions for our robot action_queue = [ (\"move\", [0, 0]), \"scan\"," ]
[ "F401 # Flask app configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random()))", "pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task')", "not_found(e): return render_template('index.html') # Actually run the application if __name__ == '__main__': app.run(port=8080,", "api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/')", "Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) #", "index(): return render_template('index.html') # Basically, if we don't hit an API call, we'll", "we don't hit an API call, we'll redirect to the react app @app.errorhandler(404)", "'/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') # Basically, if we don't", "app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection mongo_client", "to the react app @app.errorhandler(404) def not_found(e): return render_template('index.html') # Actually run the", "'CONNECTION_STRING', None)) # Setup API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource,", "database connection 
mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api = Api(app)", "EXPERIMENT_TYPES # noqa: F401 # Flask app configuration app = Flask(__name__) app.config['SECRET_KEY'] =", "noqa: F401 # Flask app configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY',", "Api from asch.config import Config from asch.server.resources import * from experiments import EXPERIMENT_TYPES", "configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection", "hit an API call, we'll redirect to the react app @app.errorhandler(404) def not_found(e):", "import random import pymongo from flask import Flask, render_template from flask_restful import Api", "import * from experiments import EXPERIMENT_TYPES # noqa: F401 # Flask app configuration", "mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play')", "@app.route('/') def index(): return render_template('index.html') # Basically, if we don't hit an API", "API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource,", "api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index():", "react app @app.errorhandler(404) def not_found(e): return 
render_template('index.html') # Actually run the application if", "import Config from asch.server.resources import * from experiments import EXPERIMENT_TYPES # noqa: F401", "we'll redirect to the react app @app.errorhandler(404) def not_found(e): return render_template('index.html') # Actually", "'/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') #", "Flask, render_template from flask_restful import Api from asch.config import Config from asch.server.resources import", "API call, we'll redirect to the react app @app.errorhandler(404) def not_found(e): return render_template('index.html')", "= Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource,", "'/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') # Basically, if we don't hit an", "redirect to the react app @app.errorhandler(404) def not_found(e): return render_template('index.html') # Actually run", "# noqa: F401 # Flask app configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask',", "from flask_restful import Api from asch.config import Config from asch.server.resources import * from", "from asch.server.resources import * from experiments import EXPERIMENT_TYPES # noqa: F401 # Flask", "# Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api", "Setup API api = Api(app) 
api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished')", "return render_template('index.html') # Basically, if we don't hit an API call, we'll redirect", "@app.errorhandler(404) def not_found(e): return render_template('index.html') # Actually run the application if __name__ ==", "app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING',", "None)) # Setup API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants')", "'/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def", "* from experiments import EXPERIMENT_TYPES # noqa: F401 # Flask app configuration app", "from flask import Flask, render_template from flask_restful import Api from asch.config import Config", "api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') # Basically, if we", "<filename>asch/server/server.py import random import pymongo from flask import Flask, render_template from flask_restful import", "# Setup API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') 
api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource,", "'/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session')", "Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database',", "Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api =", "= Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection mongo_client =", "= pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource,", "experiments import EXPERIMENT_TYPES # noqa: F401 # Flask app configuration app = Flask(__name__)", "don't hit an API call, we'll redirect to the react app @app.errorhandler(404) def", "if we don't hit an API call, we'll redirect to the react app", "api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource,", "app @app.errorhandler(404) def not_found(e): return 
render_template('index.html') # Actually run the application if __name__", "str(random.random())) # Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API", "pymongo from flask import Flask, render_template from flask_restful import Api from asch.config import", "api = Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, '/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download')", "render_template('index.html') # Basically, if we don't hit an API call, we'll redirect to", "api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') # Basically,", "the react app @app.errorhandler(404) def not_found(e): return render_template('index.html') # Actually run the application", "app configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database", "render_template from flask_restful import Api from asch.config import Config from asch.server.resources import *", "Flask app configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup", "flask_restful import Api from asch.config import Config from asch.server.resources import * from experiments", "connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup API api = Api(app) api.add_resource(PlayAPIResource,", "= Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) # Setup database connection 
mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None))", "'SECRET_KEY', str(random.random())) # Setup database connection mongo_client = pymongo.MongoClient(Config.get_or_else('database', 'CONNECTION_STRING', None)) # Setup", "Basically, if we don't hit an API call, we'll redirect to the react", "import Api from asch.config import Config from asch.server.resources import * from experiments import", "from asch.config import Config from asch.server.resources import * from experiments import EXPERIMENT_TYPES #", "asch.config import Config from asch.server.resources import * from experiments import EXPERIMENT_TYPES # noqa:", "return render_template('index.html') # Actually run the application if __name__ == '__main__': app.run(port=8080, debug=True)", "import pymongo from flask import Flask, render_template from flask_restful import Api from asch.config", "import Flask, render_template from flask_restful import Api from asch.config import Config from asch.server.resources", "def not_found(e): return render_template('index.html') # Actually run the application if __name__ == '__main__':", "# Basically, if we don't hit an API call, we'll redirect to the", "def index(): return render_template('index.html') # Basically, if we don't hit an API call,", "api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') # Basically, if we don't hit", "# Flask app configuration app = Flask(__name__) app.config['SECRET_KEY'] = Config.get_or_else('flask', 'SECRET_KEY', str(random.random())) #", "an API call, we'll redirect to the react app @app.errorhandler(404) def not_found(e): return", "flask import Flask, render_template from flask_restful import Api from asch.config import Config from", "Api(app) api.add_resource(PlayAPIResource, '/api/v0/play') api.add_resource(UnityTaskAPIResource, '/api/v0/unity/task') api.add_resource(ParticipantViewAPIResource, 
'/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login')", "from experiments import EXPERIMENT_TYPES # noqa: F401 # Flask app configuration app =", "api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html')", "'/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return render_template('index.html') # Basically, if", "asch.server.resources import * from experiments import EXPERIMENT_TYPES # noqa: F401 # Flask app", "random import pymongo from flask import Flask, render_template from flask_restful import Api from", "'/api/v0/participants') api.add_resource(ParticipantFinishedAPIResource, '/api/v0/particpants/finished') api.add_resource(DownloadParticipantDataAPIResource, '/api/v0/data/download') api.add_resource(LoginAPIResource, '/api/v0/login') api.add_resource(LoginValidateAPIResource, '/api/v0/validate_session') @app.route('/') def index(): return", "import EXPERIMENT_TYPES # noqa: F401 # Flask app configuration app = Flask(__name__) app.config['SECRET_KEY']", "Config from asch.server.resources import * from experiments import EXPERIMENT_TYPES # noqa: F401 #", "call, we'll redirect to the react app @app.errorhandler(404) def not_found(e): return render_template('index.html') #" ]
[ "up param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] =", "1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt,", "from perform.time_integrator.time_integrator import TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict", "set up param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"]", "= 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict)", "= 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) self.assertEqual(time_integrator.time_scheme, \"bdf\") self.assertEqual(time_integrator.time_order, 2)", "setUp(self): # set up param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] =", "{} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator", "# set up param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\"", "2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) self.assertEqual(time_integrator.time_scheme, \"bdf\") self.assertEqual(time_integrator.time_order, 2) self.assertEqual(time_integrator.subiter,", "\"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) 
self.assertEqual(time_integrator.time_scheme, \"bdf\")", "= \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) self.assertEqual(time_integrator.time_scheme,", "perform.time_integrator.time_integrator import TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict =", "TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict = {} self.param_dict[\"dt\"]", "self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def", "import TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict = {}", "def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) self.assertEqual(time_integrator.time_scheme, \"bdf\") self.assertEqual(time_integrator.time_order, 2) self.assertEqual(time_integrator.subiter, 0)", "TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7", "def setUp(self): # set up param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"]", "class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict self.param_dict = {} self.param_dict[\"dt\"] =", "self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator =", "param_dict self.param_dict = {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2", "<reponame>cwentland0/perform import unittest from perform.time_integrator.time_integrator import TimeIntegrator class 
TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set", "self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7)", "unittest from perform.time_integrator.time_integrator import TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up param_dict", "= {} self.param_dict[\"dt\"] = 1e-7 self.param_dict[\"time_scheme\"] = \"bdf\" self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self):", "self.param_dict[\"time_order\"] = 2 def test_time_integrator_init(self): time_integrator = TimeIntegrator(self.param_dict) self.assertEqual(time_integrator.dt, 1e-7) self.assertEqual(time_integrator.time_scheme, \"bdf\") self.assertEqual(time_integrator.time_order,", "import unittest from perform.time_integrator.time_integrator import TimeIntegrator class TimeIntegratorTestCase(unittest.TestCase): def setUp(self): # set up" ]
[ "3, pool = 'cls', in_channels = 3, dim_head = 64, dropout = 0.,", "%.3fM' % parameters) out = model(img) print(\"Shape of out :\", out.shape) # [B,", "= nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head", "% patch_size == 0, 'Image dimensions must be divisible by the patch size.'", "b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d -> b n d', b=b) x", "3, 224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p:", "x = rearrange(x[:, 0], '(b t) ... -> b t ...', b=b) cls_temporal_tokens", "x = rearrange(x, 'b t n d -> (b t) n d') x", "depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm =", "self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool == 'mean' else x[:, 0]", "parameters = sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' %", "p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1,", "mlp_dim, dropout = dropout)) ])) def forward(self, x): for attn, ff in self.layers:", "t) ... -> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d", "print('Trainable Parameters: %.3fM' % parameters) out = model(img) print(\"Shape of out :\", out.shape)", "\"__main__\": img = torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224, 16, 100,", "x = ff(x) + x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size,", "// patch_size) ** 2 patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding =", "= nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout", "0, 'Image dimensions must be divisible by the patch size.' 
num_patches = (image_size", "x = self.dropout(x) x = rearrange(x, 'b t n d -> (b t)", "dim_head = 64, dropout = 0., emb_dropout = 0., scale_dim = 4, ):", "_ = x.shape cls_space_tokens = repeat(self.space_token, '() n d -> b t n", "= nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def", ") def forward(self, x): x = self.to_patch_embedding(x) b, t, n, _ = x.shape", "...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d -> b n d', b=b)", "self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim = 192, depth", "= torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters", "ff(x) + x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames,", "nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer =", ":(n + 1)] x = self.dropout(x) x = rearrange(x, 'b t n d", "== 'mean' else x[:, 0] return self.mlp_head(x) if __name__ == \"__main__\": img =", "dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self,", "b n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x", "n d -> b n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x", "depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim,", "d -> b n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x =", "import Rearrange from module import Attention, PreNorm, FeedForward import numpy as np class", "def __init__(self, image_size, patch_size, num_classes, num_frames, dim = 192, depth = 4, heads", "in_channels = 3, dim_head = 64, dropout = 0., emb_dropout = 0., scale_dim", "t, n, _ = x.shape cls_space_tokens = 
repeat(self.space_token, '() n d -> b", "self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout)", "super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim,", "= sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters)", "n d -> (b t) n d') x = self.space_transformer(x) x = rearrange(x[:,", "b t n d', b = b, t=t) x = torch.cat((cls_space_tokens, x), dim=2)", "num_classes, num_frames, dim = 192, depth = 4, heads = 3, pool =", "rearrange, repeat from einops.layers.torch import Rearrange from module import Attention, PreNorm, FeedForward import", "x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim =", "= 3, pool = 'cls', in_channels = 3, dim_head = 64, dropout =", "x): for attn, ff in self.layers: x = attn(x) + x x =", "t n d', b = b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x", "PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim,", "in {'cls', 'mean'}, 'pool type must be either cls (cls token) or mean", "must be either cls (cls token) or mean (mean pooling)' assert image_size %", "16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p in", "1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1,", "0], '(b t) ... 
-> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '()", "= 3, dim_head = 64, dropout = 0., emb_dropout = 0., scale_dim =", "in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1)", "'b t n d -> (b t) n d') x = self.space_transformer(x) x", "p2) -> b t (h w) (p1 p2 c)', p1 = patch_size, p2", "forward(self, x): x = self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens =", "c (h p1) (w p2) -> b t (h w) (p1 p2 c)',", "self.mlp_head(x) if __name__ == \"__main__\": img = torch.ones([1, 16, 3, 224, 224]).cuda() model", "(cls token) or mean (mean pooling)' assert image_size % patch_size == 0, 'Image", "= 4, heads = 3, pool = 'cls', in_channels = 3, dim_head =", "dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1,", "b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d -> b n", "-> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d -> b", "dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim,", "(h p1) (w p2) -> b t (h w) (p1 p2 c)', p1", "1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout)", "self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1) (w p2) -> b t", "= nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x = self.to_patch_embedding(x) b,", "= torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim = 1) if", "self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens = repeat(self.space_token, '() n d", "Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head", "t ...', b=b) cls_temporal_tokens = 
repeat(self.temporal_token, '() n d -> b n d',", "self.dropout(x) x = rearrange(x, 'b t n d -> (b t) n d')", "dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads,", "parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p in parameters])", "n d', b = b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x +=", "100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p", "self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim))", "x.mean(dim = 1) if self.pool == 'mean' else x[:, 0] return self.mlp_head(x) if", "= Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer", "n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x =", "patch_size == 0, 'Image dimensions must be divisible by the patch size.' 
num_patches", "num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth,", "= self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens = repeat(self.space_token, '() n", "nn.Sequential( Rearrange('b t c (h p1) (w p2) -> b t (h w)", "einsum import torch.nn.functional as F from einops import rearrange, repeat from einops.layers.torch import", "1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head,", "model = ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters", "Attention(dim, heads = heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim,", "= dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self, x): for", "ff in self.layers: x = attn(x) + x x = ff(x) + x", "forward(self, x): for attn, ff in self.layers: x = attn(x) + x x", ":, :(n + 1)] x = self.dropout(x) x = rearrange(x, 'b t n", "(w p2) -> b t (h w) (p1 p2 c)', p1 = patch_size,", "__name__ == \"__main__\": img = torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224,", "patch_size, num_classes, num_frames, dim = 192, depth = 4, heads = 3, pool", "x = x.mean(dim = 1) if self.pool == 'mean' else x[:, 0] return", "self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout)", "self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x = self.to_patch_embedding(x)", "= x.shape cls_space_tokens = repeat(self.space_token, '() n d -> b t n d',", "range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head, dropout = dropout)),", "* patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1) (w", "PreNorm, FeedForward import numpy as np class 
Transformer(nn.Module): def __init__(self, dim, depth, heads,", "patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token", "self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x):", "FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self, x): for attn, ff in", "image_size % patch_size == 0, 'Image dimensions must be divisible by the patch", "return self.mlp_head(x) if __name__ == \"__main__\": img = torch.ones([1, 16, 3, 224, 224]).cuda()", "4, ): super().__init__() assert pool in {'cls', 'mean'}, 'pool type must be either", "224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad,", "w) (p1 p2 c)', p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), )", "Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer =", "must be divisible by the patch size.' num_patches = (image_size // patch_size) **", "4, heads = 3, pool = 'cls', in_channels = 3, dim_head = 64,", "= Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool", "heads = heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout", "x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n + 1)] x", "+ 1)] x = self.dropout(x) x = rearrange(x, 'b t n d ->", "'(b t) ... 
-> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n", "= ff(x) + x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes,", "type must be either cls (cls token) or mean (mean pooling)' assert image_size", "d -> b t n d', b = b, t=t) x = torch.cat((cls_space_tokens,", "'cls', in_channels = 3, dim_head = 64, dropout = 0., emb_dropout = 0.,", "if __name__ == \"__main__\": img = torch.ones([1, 16, 3, 224, 224]).cuda() model =", "x[:, 0] return self.mlp_head(x) if __name__ == \"__main__\": img = torch.ones([1, 16, 3,", "dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim,", "(h w) (p1 p2 c)', p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim, dim),", "def forward(self, x): x = self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens", "patch_size) ** 2 patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential(", "F from einops import rearrange, repeat from einops.layers.torch import Rearrange from module import", "ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim = 192, depth = 4,", "= 0., scale_dim = 4, ): super().__init__() assert pool in {'cls', 'mean'}, 'pool", "pool = 'cls', in_channels = 3, dim_head = 64, dropout = 0., emb_dropout", "dropout = 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in", "rearrange(x, 'b t n d -> (b t) n d') x = self.space_transformer(x)", "'mean' else x[:, 0] return self.mlp_head(x) if __name__ == \"__main__\": img = torch.ones([1,", "import nn, einsum import torch.nn.functional as F from einops import rearrange, repeat from", "+ 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads,", "n, _ = x.shape cls_space_tokens = repeat(self.space_token, '() n d -> b t", "dim_head, mlp_dim, dropout = 0.): 
super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for", "cls (cls token) or mean (mean pooling)' assert image_size % patch_size == 0,", "the patch size.' num_patches = (image_size // patch_size) ** 2 patch_dim = in_channels", "(image_size // patch_size) ** 2 patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding", "b, t, n, _ = x.shape cls_space_tokens = repeat(self.space_token, '() n d ->", "nn, einsum import torch.nn.functional as F from einops import rearrange, repeat from einops.layers.torch", "self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim))", "= dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def", "super().__init__() assert pool in {'cls', 'mean'}, 'pool type must be either cls (cls", "64, dropout = 0., emb_dropout = 0., scale_dim = 4, ): super().__init__() assert", "16, 3, 224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda", "size.' 
num_patches = (image_size // patch_size) ** 2 patch_dim = in_channels * patch_size", "import torch from torch import nn, einsum import torch.nn.functional as F from einops", "self.pool == 'mean' else x[:, 0] return self.mlp_head(x) if __name__ == \"__main__\": img", "image_size, patch_size, num_classes, num_frames, dim = 192, depth = 4, heads = 3,", "= nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads", "t=t) x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n + 1)]", "num_patches = (image_size // patch_size) ** 2 patch_dim = in_channels * patch_size **", "self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads,", "= dropout)) ])) def forward(self, x): for attn, ff in self.layers: x =", "self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool =", "heads = 3, pool = 'cls', in_channels = 3, dim_head = 64, dropout", "3, dim_head = 64, dropout = 0., emb_dropout = 0., scale_dim = 4,", "import Attention, PreNorm, FeedForward import numpy as np class Transformer(nn.Module): def __init__(self, dim,", "as np class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout =", "Rearrange from module import Attention, PreNorm, FeedForward import numpy as np class Transformer(nn.Module):", "__init__(self, image_size, patch_size, num_classes, num_frames, dim = 192, depth = 4, heads =", "= 0., emb_dropout = 0., scale_dim = 4, ): super().__init__() assert pool in", "= pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x", "= rearrange(x, 'b t n d -> (b t) n d') x =", "patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches +", "dimensions must be 
divisible by the patch size.' num_patches = (image_size // patch_size)", "'() n d -> b t n d', b = b, t=t) x", "dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head,", "mlp_dim, dropout = 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _", "nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self,", "'pool type must be either cls (cls token) or mean (mean pooling)' assert", "torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters =", "cls_space_tokens = repeat(self.space_token, '() n d -> b t n d', b =", "dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self, x):", "from module import Attention, PreNorm, FeedForward import numpy as np class Transformer(nn.Module): def", "in self.layers: x = attn(x) + x x = ff(x) + x return", "img = torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224, 16, 100, 16).cuda()", "class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__()", "torch from torch import nn, einsum import torch.nn.functional as F from einops import", "= 64, dropout = 0., emb_dropout = 0., scale_dim = 4, ): super().__init__()", "scale_dim = 4, ): super().__init__() assert pool in {'cls', 'mean'}, 'pool type must", "-> b t n d', b = b, t=t) x = torch.cat((cls_space_tokens, x),", "np class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout = 0.):", "if self.pool == 'mean' else x[:, 0] return self.mlp_head(x) if __name__ == \"__main__\":", "pool in {'cls', 'mean'}, 'pool type must be either cls (cls token) or", "x x = ff(x) + x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size,", "0., emb_dropout = 0., scale_dim = 4, ): super().__init__() 
assert pool in {'cls',", "= 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth):", "nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head =", "nn.Linear(dim, num_classes) ) def forward(self, x): x = self.to_patch_embedding(x) b, t, n, _", "= self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ... -> b t ...',", "patch size.' num_patches = (image_size // patch_size) ** 2 patch_dim = in_channels *", "'Image dimensions must be divisible by the patch size.' num_patches = (image_size //", "self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ... -> b t ...', b=b)", "n d') x = self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ... ->", "be either cls (cls token) or mean (mean pooling)' assert image_size % patch_size", "from einops.layers.torch import Rearrange from module import Attention, PreNorm, FeedForward import numpy as", "numpy as np class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout", "token) or mean (mean pooling)' assert image_size % patch_size == 0, 'Image dimensions", "assert pool in {'cls', 'mean'}, 'pool type must be either cls (cls token)", "p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out = model(img)", "= self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool == 'mean' else x[:,", "0.): super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([", "0., scale_dim = 4, ): super().__init__() assert pool in {'cls', 'mean'}, 'pool type", "= (image_size // patch_size) ** 2 patch_dim = in_channels * patch_size ** 2", "einops import rearrange, repeat from einops.layers.torch import Rearrange from module import Attention, PreNorm,", "x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool ==", "c)', p1 = patch_size, p2 = patch_size), 
nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1,", "192, depth = 4, heads = 3, pool = 'cls', in_channels = 3,", "x): x = self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens = repeat(self.space_token,", "def forward(self, x): for attn, ff in self.layers: x = attn(x) + x", "pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x =", "heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout))", "/ 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out = model(img) print(\"Shape of out", "(mean pooling)' assert image_size % patch_size == 0, 'Image dimensions must be divisible", "nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x = self.to_patch_embedding(x) b, t,", "= 'cls', in_channels = 3, dim_head = 64, dropout = 0., emb_dropout =", "224]).cuda() model = ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters())", "x.shape cls_space_tokens = repeat(self.space_token, '() n d -> b t n d', b", "= torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n + 1)] x =", "depth = 4, heads = 3, pool = 'cls', in_channels = 3, dim_head", "self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x) x = rearrange(x, 'b t", "num_frames, dim = 192, depth = 4, heads = 3, pool = 'cls',", "or mean (mean pooling)' assert image_size % patch_size == 0, 'Image dimensions must", "x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim = 1)", "nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token =", "'mean'}, 'pool type must be either cls (cls token) or mean (mean pooling)'", "assert image_size % patch_size == 0, 'Image dimensions must be divisible by the", "x = self.to_patch_embedding(x) b, t, n, _ = x.shape cls_space_tokens = 
repeat(self.space_token, '()", "self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes) )", "t c (h p1) (w p2) -> b t (h w) (p1 p2", "'() n d -> b n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1)", "2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1) (w p2) -> b", "dropout)) ])) def forward(self, x): for attn, ff in self.layers: x = attn(x)", "dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim,", "= attn(x) + x x = ff(x) + x return self.norm(x) class ViViT(nn.Module):", "in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head, dropout =", "d') x = self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ... -> b", "== 0, 'Image dimensions must be divisible by the patch size.' num_patches =", "for attn, ff in self.layers: x = attn(x) + x x = ff(x)", "(p1 p2 c)', p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding", "PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self, x): for attn, ff", "t n d -> (b t) n d') x = self.space_transformer(x) x =", "def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers =", "dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ])) def forward(self, x): for attn,", "module import Attention, PreNorm, FeedForward import numpy as np class Transformer(nn.Module): def __init__(self,", "rearrange(x[:, 0], '(b t) ... 
-> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token,", "= patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim))", "d -> (b t) n d') x = self.space_transformer(x) x = rearrange(x[:, 0],", "nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout =", "= nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer", "= 4, ): super().__init__() assert pool in {'cls', 'mean'}, 'pool type must be", "num_classes) ) def forward(self, x): x = self.to_patch_embedding(x) b, t, n, _ =", "= repeat(self.space_token, '() n d -> b t n d', b = b,", "x = self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool == 'mean' else", "x = attn(x) + x x = ff(x) + x return self.norm(x) class", "== \"__main__\": img = torch.ones([1, 16, 3, 224, 224]).cuda() model = ViViT(224, 16,", "dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim), nn.Linear(dim, num_classes)", "p1) (w p2) -> b t (h w) (p1 p2 c)', p1 =", "x), dim=2) x += self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x) x", "in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out = model(img) print(\"Shape", "for p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out =", "torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool", "torch.nn.functional as F from einops import rearrange, repeat from einops.layers.torch import Rearrange from", "dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head = nn.Sequential( nn.LayerNorm(dim),", "{'cls', 'mean'}, 'pool type must be either cls (cls token) or mean (mean", "+= self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x) x 
= rearrange(x, 'b", "= self.dropout(x) x = rearrange(x, 'b t n d -> (b t) n", "<filename>vivit.py import torch from torch import nn, einsum import torch.nn.functional as F from", "= 1) if self.pool == 'mean' else x[:, 0] return self.mlp_head(x) if __name__", "depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool self.mlp_head =", "(b t) n d') x = self.space_transformer(x) x = rearrange(x[:, 0], '(b t)", "FeedForward import numpy as np class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head,", "dim = 192, depth = 4, heads = 3, pool = 'cls', in_channels", "cls_temporal_tokens = repeat(self.temporal_token, '() n d -> b n d', b=b) x =", "= heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout =", "import rearrange, repeat from einops.layers.torch import Rearrange from module import Attention, PreNorm, FeedForward", "divisible by the patch size.' num_patches = (image_size // patch_size) ** 2 patch_dim", "else x[:, 0] return self.mlp_head(x) if __name__ == \"__main__\": img = torch.ones([1, 16,", "Attention, PreNorm, FeedForward import numpy as np class Transformer(nn.Module): def __init__(self, dim, depth,", "b t (h w) (p1 p2 c)', p1 = patch_size, p2 = patch_size),", "dim, depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm", "torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x)", "p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable Parameters:", "from einops import rearrange, repeat from einops.layers.torch import Rearrange from module import Attention,", "heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim)", "heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool = pool 
self.mlp_head = nn.Sequential(", "-> b t (h w) (p1 p2 c)', p1 = patch_size, p2 =", "t (h w) (p1 p2 c)', p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim,", "dropout = 0., emb_dropout = 0., scale_dim = 4, ): super().__init__() assert pool", "+ x return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim", "= patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches", "repeat(self.temporal_token, '() n d -> b n d', b=b) x = torch.cat((cls_temporal_tokens, x),", "sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out", "Parameters: %.3fM' % parameters) out = model(img) print(\"Shape of out :\", out.shape) #", "= nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token", "einops.layers.torch import Rearrange from module import Attention, PreNorm, FeedForward import numpy as np", "= in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h", "be divisible by the patch size.' 
num_patches = (image_size // patch_size) ** 2", "x += self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x) x = rearrange(x,", "= x.mean(dim = 1) if self.pool == 'mean' else x[:, 0] return self.mlp_head(x)", "num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim,", "mean (mean pooling)' assert image_size % patch_size == 0, 'Image dimensions must be", "dim)) self.temporal_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.dropout = nn.Dropout(emb_dropout) self.pool", "1)] x = self.dropout(x) x = rearrange(x, 'b t n d -> (b", "nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads =", "Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers", "d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim", "emb_dropout = 0., scale_dim = 4, ): super().__init__() assert pool in {'cls', 'mean'},", "= filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p in parameters]) /", "b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n +", "** 2 patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b", "): super().__init__() assert pool in {'cls', 'mean'}, 'pool type must be either cls", "Rearrange('b t c (h p1) (w p2) -> b t (h w) (p1", "dim=2) x += self.pos_embedding[:, :, :(n + 1)] x = self.dropout(x) x =", "1_000_000 print('Trainable Parameters: %.3fM' % parameters) out = model(img) print(\"Shape of out :\",", "either cls (cls token) or mean (mean pooling)' assert image_size % patch_size ==", "t) n d') x = self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ...", "filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) 
for p in parameters]) / 1_000_000", "patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1) (w p2)", "0] return self.mlp_head(x) if __name__ == \"__main__\": img = torch.ones([1, 16, 3, 224,", "= ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters =", "16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for", "nn.Parameter(torch.randn(1, 1, dim)) self.space_transformer = Transformer(dim, depth, heads, dim_head, dim*scale_dim, dropout) self.temporal_token =", "d', b = b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:,", "dim=1) x = self.temporal_transformer(x) x = x.mean(dim = 1) if self.pool == 'mean'", "attn, ff in self.layers: x = attn(x) + x x = ff(x) +", "self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head, dropout = dropout)), PreNorm(dim,", "p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames,", "= 192, depth = 4, heads = 3, pool = 'cls', in_channels =", "repeat from einops.layers.torch import Rearrange from module import Attention, PreNorm, FeedForward import numpy", "dropout = dropout)) ])) def forward(self, x): for attn, ff in self.layers: x", "import numpy as np class Transformer(nn.Module): def __init__(self, dim, depth, heads, dim_head, mlp_dim,", "x = self.space_transformer(x) x = rearrange(x[:, 0], '(b t) ... 
-> b t", "pooling)' assert image_size % patch_size == 0, 'Image dimensions must be divisible by", "-> b n d', b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x)", "from torch import nn, einsum import torch.nn.functional as F from einops import rearrange,", "attn(x) + x x = ff(x) + x return self.norm(x) class ViViT(nn.Module): def", "_ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head, dropout", "** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c (h p1) (w p2) ->", "patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t c", "__init__(self, dim, depth, heads, dim_head, mlp_dim, dropout = 0.): super().__init__() self.layers = nn.ModuleList([])", "= b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :, :(n", "+ x x = ff(x) + x return self.norm(x) class ViViT(nn.Module): def __init__(self,", "by the patch size.' num_patches = (image_size // patch_size) ** 2 patch_dim =", "= rearrange(x[:, 0], '(b t) ... 
-> b t ...', b=b) cls_temporal_tokens =", "dim_head = dim_head, dropout = dropout)), PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)) ]))", "ViViT(224, 16, 100, 16).cuda() parameters = filter(lambda p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size())", "1) if self.pool == 'mean' else x[:, 0] return self.mlp_head(x) if __name__ ==", "-> (b t) n d') x = self.space_transformer(x) x = rearrange(x[:, 0], '(b", "self.layers: x = attn(x) + x x = ff(x) + x return self.norm(x)", "])) def forward(self, x): for attn, ff in self.layers: x = attn(x) +", "return self.norm(x) class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim = 192,", "p2 c)', p1 = patch_size, p2 = patch_size), nn.Linear(patch_dim, dim), ) self.pos_embedding =", "b=b) x = torch.cat((cls_temporal_tokens, x), dim=1) x = self.temporal_transformer(x) x = x.mean(dim =", "parameters]) / 1_000_000 print('Trainable Parameters: %.3fM' % parameters) out = model(img) print(\"Shape of", "nn.LayerNorm(dim), nn.Linear(dim, num_classes) ) def forward(self, x): x = self.to_patch_embedding(x) b, t, n,", "b = b, t=t) x = torch.cat((cls_space_tokens, x), dim=2) x += self.pos_embedding[:, :,", "self.layers = nn.ModuleList([]) self.norm = nn.LayerNorm(dim) for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim,", "dim), ) self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1,", "import torch.nn.functional as F from einops import rearrange, repeat from einops.layers.torch import Rearrange", "= nn.Sequential( Rearrange('b t c (h p1) (w p2) -> b t (h", "... 
-> b t ...', b=b) cls_temporal_tokens = repeat(self.temporal_token, '() n d ->", "% parameters) out = model(img) print(\"Shape of out :\", out.shape) # [B, num_classes]", "2 patch_dim = in_channels * patch_size ** 2 self.to_patch_embedding = nn.Sequential( Rearrange('b t", "repeat(self.space_token, '() n d -> b t n d', b = b, t=t)", "= repeat(self.temporal_token, '() n d -> b n d', b=b) x = torch.cat((cls_temporal_tokens,", "n d -> b t n d', b = b, t=t) x =", "heads, dim_head, dim*scale_dim, dropout) self.temporal_token = nn.Parameter(torch.randn(1, 1, dim)) self.temporal_transformer = Transformer(dim, depth,", "model.parameters()) parameters = sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable Parameters: %.3fM'", "torch import nn, einsum import torch.nn.functional as F from einops import rearrange, repeat", ") self.pos_embedding = nn.Parameter(torch.randn(1, num_frames, num_patches + 1, dim)) self.space_token = nn.Parameter(torch.randn(1, 1,", "as F from einops import rearrange, repeat from einops.layers.torch import Rearrange from module", "class ViViT(nn.Module): def __init__(self, image_size, patch_size, num_classes, num_frames, dim = 192, depth =", "for _ in range(depth): self.layers.append(nn.ModuleList([ PreNorm(dim, Attention(dim, heads = heads, dim_head = dim_head,", "p: p.requires_grad, model.parameters()) parameters = sum([np.prod(p.size()) for p in parameters]) / 1_000_000 print('Trainable" ]
[ "</td> : use aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album', td=True", "data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status in ('Ignored',", "('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> :", ": if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div>", "view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th>", "from .. import table_class, table_style, caption_args : from ..helpers.helpers import aa_link : def", "end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'>", "<tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i, r", "if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption>", "but this should probably be an error! : print('Missing song', r.song_id) <td colspan=7>Came", "should probably be an error! : print('Missing song', r.song_id) <td colspan=7>Came across a", "<button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul", "songs for now, but this should probably be an error! 
: print('Missing song',", ": from ..helpers.helpers import aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None,", "row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td>", "missing songs for now, but this should probably be an error! : print('Missing", ": print('Missing song', r.song_id) <td colspan=7>Came across a bad row in the requests", "import table_class, table_style, caption_args : from ..helpers.helpers import aa_link : def requeststemplate page=_page,", "</table> : end : end : def requestrow ctx, row <tr id='rr_${row.id}'> <td", "</tbody> </table> : end : end : def requestrow ctx, row <tr id='rr_${row.id}'>", "href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th>", "rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if rv != view_status <li><a", "<div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span>", "end : end </tbody> </table> : end : end : def requestrow ctx,", "encoding: cinje : from ..template import page as _page : from .. import", "..template import page as _page : from .. 
import table_class, table_style, caption_args :", "table_class, table_style, caption_args : from ..helpers.helpers import aa_link : def requeststemplate page=_page, title=None,", "To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored',", "btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status", "import aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using", "def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title, ctx,", "end : end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th>", "</ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody>", "<li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested", "status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() != status <li><a", "ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests", "btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for", ": end </ul> </div> </td> : use aa_link row.song.artist, 'artist', 
td=True : use", "use requestrow ctx, r : except AttributeError # TODO: Ignore missing songs for", "('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li>", "class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button>", "in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li>", ": except TypeError : pass : end &nbsp; <div class='btn-group'> <button type='button' class='btn", "cinje : from ..template import page as _page : from .. import table_class,", "class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played')", "</button> <ul class=\"dropdown-menu\"> : for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') :", "except AttributeError # TODO: Ignore missing songs for now, but this should probably", "'artist', td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td", "song id ${r.song_id}</td></tr> : end : if i % 50 : flush :", "enumerate(requestlist) : try : use requestrow ctx, r : except AttributeError # TODO:", "'Pending', 'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end", "Played</th> </tr> </thead> <tbody> : for i, r in enumerate(requestlist) : try :", "across a bad row in the requests list for song id ${r.song_id}</td></tr> :", "requests list for song id ${r.song_id}</td></tr> : end : if i % 50", "btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul 
class='dropdown-menu'>", "requestlist=[], view_status=None, requestinfo=None : using page title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{'", ": end : def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\">", "the requests list for song id ${r.song_id}</td></tr> : end : if i %", "<td colspan=7>Came across a bad row in the requests list for song id", "data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td> : end </tr>", "try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end &nbsp; <div class='btn-group'> <button", "requestrow ctx, r : except AttributeError # TODO: Ignore missing songs for now,", "row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td>", "..helpers.helpers import aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None :", "&nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests", "r : except AttributeError # TODO: Ignore missing songs for now, but this", "<th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i, r in enumerate(requestlist)", "flush : end : end </tbody> </table> : end : end : def", "end : if i % 50 : flush : end : end </tbody>", "class=\"dropdown-menu\"> : for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize()", "<td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td 
data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td> :", ": try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td> : end </tr> :", "'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td>", "r in enumerate(requestlist) : try : use requestrow ctx, r : except AttributeError", "end </ul> </div> </td> : use aa_link row.song.artist, 'artist', td=True : use aa_link", "<caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end", "table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))})", "btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul", "from ..helpers.helpers import aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None", "rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption> <thead>", "i, r in enumerate(requestlist) : try : use requestrow ctx, r : except", "AttributeError # TODO: Ignore missing songs for now, but this should probably be", ": use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> 
<td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> :", "for now, but this should probably be an error! : print('Missing song', r.song_id)", "class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button>", "# encoding: cinje : from ..template import page as _page : from ..", "use aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td>", "Ignore missing songs for now, but this should probably be an error! :", "end : end </ul> </div> </td> : use aa_link row.song.artist, 'artist', td=True :", "<tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\"", "id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass :", "import page as _page : from .. import table_class, table_style, caption_args : from", ".. 
import table_class, table_style, caption_args : from ..helpers.helpers import aa_link : def requeststemplate", "class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View:", "lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests :", "class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if rv", "ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title, ctx, lang=\"en\" : table_class.append('sortable') <table", "def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn", "class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> :", "ctx, r : except AttributeError # TODO: Ignore missing songs for now, but", "from ..template import page as _page : from .. 
import table_class, table_style, caption_args", "end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead>", ": end : if i % 50 : flush : end : end", "use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try", "dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> :", "requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs", "print('Missing song', r.song_id) <td colspan=7>Came across a bad row in the requests list", "a bad row in the requests list for song id ${r.song_id}</td></tr> : end", "id ${r.song_id}</td></tr> : end : if i % 50 : flush : end", ": for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() !=", "ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary", "'Delete') : if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end", "<td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td> : end", "<ul class=\"dropdown-menu\"> : for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if", "if i % 50 : flush : end : end </tbody> </table> :", "!= status <li><a 
href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td> :", "</tr> </thead> <tbody> : for i, r in enumerate(requestlist) : try : use", "bad row in the requests list for song id ${r.song_id}</td></tr> : end :", "in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id,", ": use aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td>", "if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div>", "</div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> :", ": end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr>", "<table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) :", ": if i % 50 : flush : end : end </tbody> </table>", "#{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end &nbsp;", "be an error! 
: print('Missing song', r.song_id) <td colspan=7>Came across a bad row", "<th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i, r in enumerate(requestlist) : try", ": end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true'", "class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except", ": try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end &nbsp; <div class='btn-group'>", "(${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end &nbsp; <div class='btn-group'> <button type='button'", "<ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if", "<td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\">", "# TODO: Ignore missing songs for now, but this should probably be an", "now, but this should probably be an error! 
: print('Missing song', r.song_id) <td", "aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status in ('Ignored', 'New',", "'.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError", ": try : use requestrow ctx, r : except AttributeError # TODO: Ignore", "</ul> </div> </td> : use aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album,", "aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td", "id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\"", "<td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except", "</caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for", "requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title, ctx, lang=\"en\"", "TypeError : pass : end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary", "'Pending', 'Played', 'Delete') : if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, 
status.lower())}>${status.capitalize()}</a></li> : end", ": pass : end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle'", "row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by}", ": from ..template import page as _page : from .. import table_class, table_style,", "pass : end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown'", "aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in", "'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul>", "aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status in ('Ignored', 'New', 'Pending',", ": end </tbody> </table> : end : end : def requestrow ctx, row", "td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td>", "error! 
: print('Missing song', r.song_id) <td colspan=7>Came across a bad row in the", "using page title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'>", "<button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span", "try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td> : end </tr> : end", "i % 50 : flush : end : end </tbody> </table> : end", ": if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul>", "'.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass", ": using page title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\"", "row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\"", "type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span>", "for i, r in enumerate(requestlist) : try : use requestrow ctx, r :", ": except AttributeError # TODO: Ignore missing songs for now, but this should", "this should probably be an error! : print('Missing song', r.song_id) <td colspan=7>Came across", "in the requests list for song id ${r.song_id}</td></tr> : end : if i", ": end : end : def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'>", "page as _page : from .. 
import table_class, table_style, caption_args : from ..helpers.helpers", "page title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption", "'Ignored', 'New', 'Pending', 'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end", "song', r.song_id) <td colspan=7>Came across a bad row in the requests list for", "aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page", "type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\">", "Requests To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in ('New/Pending',", ": table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try", "caption_args : from ..helpers.helpers import aa_link : def requeststemplate page=_page, title=None, ctx=None, requestlist=[],", "${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New', 'Pending',", "as _page : from .. import table_class, table_style, caption_args : from ..helpers.helpers import", "50 : flush : end : end </tbody> </table> : end : end", ": end : end </ul> </div> </caption> <thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last", "${r.song_id}</td></tr> : end : if i % 50 : flush : end :", ": for rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') : if rv !=", "list for song id ${r.song_id}</td></tr> : end : if i % 50 :", "_page : from .. 
import table_class, table_style, caption_args : from ..helpers.helpers import aa_link", "style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()} Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError :", "'New', 'Pending', 'Played') : if rv != view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end :", "dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status in", "By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i, r in enumerate(requestlist) :", "<li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td> : use aa_link", ": use requestrow ctx, r : except AttributeError # TODO: Ignore missing songs", "probably be an error! 
: print('Missing song', r.song_id) <td colspan=7>Came across a bad", "view_status=None, requestinfo=None : using page title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\"", "!= view_status <li><a href='/admin/?view_status=${rv}'>${rv}</a></li> : end : end </ul> </div> </caption> <thead> <tr>", "<reponame>bmillham/djrq2 # encoding: cinje : from ..template import page as _page : from", "Requests : try (${ctx.time_length(int(requestinfo.request_length))}) : except TypeError : pass : end &nbsp; <div", "</thead> <tbody> : for i, r in enumerate(requestlist) : try : use requestrow", "<th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i, r in", "<thead> <tr> <th>Status</th><th>Artist</th><th>Album</th> <th>Title</th><th>Length</th><th>Requested By</th> <th>Comment</th><th>Requested</th><th>Last Played</th> </tr> </thead> <tbody> : for i,", "for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() != status", "'Played', 'Delete') : if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end :", "status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td> : use", "td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> :", "href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td> : use aa_link row.song.artist,", "View: 
${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New',", ": end : end </tbody> </table> : end : end : def requestrow", "TODO: Ignore missing songs for now, but this should probably be an error!", ": end : end </ul> </div> </td> : use aa_link row.song.artist, 'artist', td=True", ": for i, r in enumerate(requestlist) : try : use requestrow ctx, r", "colspan=7>Came across a bad row in the requests list for song id ${r.song_id}</td></tr>", "${row.status.capitalize()}<span class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status in ('Ignored', 'New', 'Pending', 'Played',", "</div> </td> : use aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album',", "aa_link row.song.artist, 'artist', td=True : use aa_link row.song.album, 'album', td=True <td>${row.song.title}</td> <td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td>", "table_style, caption_args : from ..helpers.helpers import aa_link : def requeststemplate page=_page, title=None, ctx=None,", "page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title, ctx, lang=\"en\" :", "status.lower())}>${status.capitalize()}</a></li> : end : end </ul> </div> </td> : use aa_link row.song.artist, 'artist',", "except TypeError : pass : end &nbsp; <div class='btn-group'> <button type='button' class='btn btn-xs", "for song id ${r.song_id}</td></tr> : end : if i % 50 : flush", ": def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\"", "% 50 : flush : end : end </tbody> </table> : end :", ": def requeststemplate page=_page, title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title,", "'New', 'Pending', 'Played', 'Delete') : if row.status.capitalize() != status <li><a href=#{\"/admin/?change_status&id={}&status={}\".format(row.id, 
status.lower())}>${status.capitalize()}</a></li> :", "an error! : print('Missing song', r.song_id) <td colspan=7>Came across a bad row in", "</button> <ul class='dropdown-menu'> : for rv in ('New/Pending', 'Ignored', 'New', 'Pending', 'Played') :", "requestinfo=None : using page title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{'", ": from .. import table_class, table_style, caption_args : from ..helpers.helpers import aa_link :", "<div class='btn-group'> <button type='button' class='btn btn-xs btn-primary dropdown-toggle' data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To", "r.song_id) <td colspan=7>Came across a bad row in the requests list for song", "end : def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div class=\"btn-group\"> <button", "in enumerate(requestlist) : try : use requestrow ctx, r : except AttributeError #", "try : use requestrow ctx, r : except AttributeError # TODO: Ignore missing", "end : end : def requestrow ctx, row <tr id='rr_${row.id}'> <td data-value='${row.status}'> <div", ": flush : end : end </tbody> </table> : end : end :", "data-value='${row.status}'> <div class=\"btn-group\"> <button type=\"button\" class=\"btn btn-xs btn-primary dropdown-toggle\" data-toggle=\"dropdown\" aria-haspopup=\"true\" aria-expanded=\"false\"> ${row.status.capitalize()}<span", "title=None, ctx=None, requestlist=[], view_status=None, requestinfo=None : using page title, ctx, lang=\"en\" : table_class.append('sortable')", "title, ctx, lang=\"en\" : table_class.append('sortable') <table class=\"#{' '.join(table_class)}\" style=\"#{' '.join(table_style)}\" id='request-table'> <caption #{caption_args}>${requestlist.count()}", "class=\"caret\"></span> </button> <ul class=\"dropdown-menu\"> : for status in ('Ignored', 'New', 'Pending', 'Played', 'Delete')", "end </tbody> </table> : end : end : def requestrow ctx, row <tr", "for rv in ('New/Pending', 
'Ignored', 'New', 'Pending', 'Played') : if rv != view_status", "<td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td data-value=''>&nbsp;</td>", "<tbody> : for i, r in enumerate(requestlist) : try : use requestrow ctx,", "row in the requests list for song id ${r.song_id}</td></tr> : end : if", "aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for rv", "<td>${ctx.format_time(row.song.time)}</td> <td>${row.name}</td> <td>${row.msg}</td> <td data-value='${row.t_stamp}'>${ctx.time_ago(row.t_stamp)}</td> : try <td data-value='${row.song.played[0].date_played}'>${row.song.played[0].played_by} ${ctx.time_ago(row.song.played[0].date_played)}</td> : except <td", "data-toggle='dropdown' aria-haspopup='true' aria-expanded='false'> Requests To View: ${view_status}<span class='caret'></span> </button> <ul class='dropdown-menu'> : for" ]
[ "import the necessary packages from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model", "packages from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28,", "LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28, 3, 3) plot_model(model, show_shapes=True,", "from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28, 3,", "from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28, 3, 3) plot_model(model, show_shapes=True, to_file=\"lenet.png\")", "pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28, 3, 3)", "import LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28, 28, 3, 3) plot_model(model,", "# import the necessary packages from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model", "the necessary packages from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model =", "necessary packages from pyimagesearch.nn.conv.lenet import LeNet from tensorflow.keras.utils import plot_model model = LeNet.build(28," ]
[ "3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj =", "weight < 0: all_non_negative = False model.setMaximize() for u, v, d in graph.edges(data=True):", "(node_variables, edge_variables), model def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables = {} for", "= model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j, k", "x_ij = _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k,", "model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons(", "v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] +", "node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v] -", "in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u,", ") model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0 ) if not all_non_negative:", "= naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight =", "= naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons(", "j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij", "def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables = {} for v in graph.nodes():", "scip.Model(\"Triangle MaxCut\") edge_variables = {} for u, v in itertools.combinations(graph.nodes(), 2): edge_name =", "naming def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables = {} for v in", "ub=1, obj=weight, name=edge_name, 
vtype=\"B\" ) if weight < 0: all_non_negative = False model.setMaximize()", "graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2", "+ edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0", "False model.setMaximize() for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons(", "= _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j,", "graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name] = model.addVar(", "v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] -", "model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model", "v)[\"weight\"] else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\"", "edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik + x_kj <= 2)", "= graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight,", "0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables,", "u, v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight", "obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij", "x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik", "itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, 
v)[\"weight\"]", "naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight = 0", "+ node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name]", "for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik", "all_non_negative = True for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v)", "in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <=", "triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables = {} for u, v in itertools.combinations(graph.nodes(),", "_get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik + x_kj)", "ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3):", ") if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 )", "model = scip.Model(\"Triangle MaxCut\") edge_variables = {} for u, v in itertools.combinations(graph.nodes(), 2):", "in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative", "edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight", "<= x_ik + x_kj) model.addCons(x_ij + x_ik + x_kj <= 2) return edge_variables,", "obj=weight, name=edge_name, vtype=\"B\" ) if weight < 0: all_non_negative = False model.setMaximize() for", "node_variables = {} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v),", "+ x_ik + x_kj <= 2) return edge_variables, model def _get_edge_variable(u, v, edge_variables):", "= 0 edge_variables[edge_name] = 
model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for", "+ x_kj) model.addCons(x_ij + x_ik + x_kj <= 2) return edge_variables, model def", "vtype=\"B\" ) if weight < 0: all_non_negative = False model.setMaximize() for u, v,", "geco.mips.utilities.naming as naming def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables = {} for", "<= 2 ) model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0 ) if", "- edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0", "edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 )", "i, j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik =", "for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables =", "<= 0 ) if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <=", "model.addCons(x_ij + x_ik + x_kj <= 2) return edge_variables, model def _get_edge_variable(u, v,", "model = scip.Model(\"Naive MaxCut\") node_variables = {} for v in graph.nodes(): node_variables[v] =", "-node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model def", "MaxCut\") edge_variables = {} for u, v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u,", "node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <=", "node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v] +", "<= 0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 ) return", "edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) 
model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij +", "-node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons( node_variables[u]", "model def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables = {} for u, v", "edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <=", "v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0,", "ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative = True for u, v,", "d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if weight <", "u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name]", "node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model def triangle(graph): model", "else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" )", "import pyscipopt as scip import geco.mips.utilities.naming as naming def naive(graph): model = scip.Model(\"Naive", "return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name = naming.undirected_edge_name(u, v) return edge_variables[edge_name]", "if weight < 0: all_non_negative = False model.setMaximize() for u, v, d in", "u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v]", "= {} all_non_negative = True for u, v, d in graph.edges(data=True): edge_name =", "v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] =", "naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables = {} for v in graph.nodes(): node_variables[v]", "_get_edge_variable(k, j, 
edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik + x_kj", "edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight,", "= model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative = True for", "= naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name,", "= d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if weight", "model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if weight < 0: all_non_negative =", ") model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables),", "model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative = True for u,", "obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative = True for u, v, d", "0: all_non_negative = False model.setMaximize() for u, v, d in graph.edges(data=True): edge_name =", "v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name]", "_get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables)", "pyscipopt as scip import geco.mips.utilities.naming as naming def naive(graph): model = scip.Model(\"Naive MaxCut\")", "model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik + x_kj <= 2) return", "in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables)", "scip import geco.mips.utilities.naming as naming def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables =", "all_non_negative = False model.setMaximize() for u, v, d in graph.edges(data=True): edge_name = 
naming.undirected_edge_name(u,", "= _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik +", "edge_variables = {} for u, v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v)", "v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" )", "name=str(v), vtype=\"B\") edge_variables = {} all_non_negative = True for u, v, d in", "x_ik = _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik", "in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0,", "model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v]", "{} for u, v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u,", "graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1,", "as naming def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables = {} for v", "naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u]", "vtype=\"B\") edge_variables = {} all_non_negative = True for u, v, d in graph.edges(data=True):", ") model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j,", "j, edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij + x_ik + x_kj <=", "import geco.mips.utilities.naming as naming def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables = {}", "itertools import pyscipopt as scip import geco.mips.utilities.naming as naming def naive(graph): model =", "k, edge_variables) x_kj = 
_get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik + x_kj) model.addCons(x_ij", "node_variables[v] + edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons( node_variables[u] - node_variables[v]", "edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j,", "+ x_kj <= 2) return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name =", "if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons(", "vtype=\"B\" ) model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i,", "= {} for u, v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if", "{} all_non_negative = True for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u,", "itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k, edge_variables) x_kj", "lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(),", "d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name]", "edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model def triangle(graph): model = scip.Model(\"Triangle", "0 ) return (node_variables, edge_variables), model def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables", "import itertools import pyscipopt as scip import geco.mips.utilities.naming as naming def naive(graph): model", "d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar(", "edge_variables[edge_name] <= 2 ) model.addCons( -node_variables[u] - node_variables[v] + 
edge_variables[edge_name] <= 0 )", "x_ik + x_kj <= 2) return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name", "edge_variables = {} all_non_negative = True for u, v, d in graph.edges(data=True): edge_name", "True for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight =", "< 0: all_non_negative = False model.setMaximize() for u, v, d in graph.edges(data=True): edge_name", "edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if weight < 0:", "2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else:", "<= 2) return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name = naming.undirected_edge_name(u, v)", "graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name,", "name=edge_name, vtype=\"B\" ) if weight < 0: all_non_negative = False model.setMaximize() for u,", "not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u]", "<= 0 ) return (node_variables, edge_variables), model def triangle(graph): model = scip.Model(\"Triangle MaxCut\")", "MaxCut\") node_variables = {} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0,", "weight = graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1,", "graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative =", "node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {} all_non_negative = True", "= scip.Model(\"Naive MaxCut\") node_variables = {} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0,", "for u, v in itertools.combinations(graph.nodes(), 2): edge_name = 
naming.undirected_edge_name(u, v) if graph.has_edge(u, v):", ") if weight < 0: all_non_negative = False model.setMaximize() for u, v, d", "model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables)", "2) return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name = naming.undirected_edge_name(u, v) return", "= {} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\")", "naming.undirected_edge_name(u, v) weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\"", "= scip.Model(\"Triangle MaxCut\") edge_variables = {} for u, v in itertools.combinations(graph.nodes(), 2): edge_name", "0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i,", "= True for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight", "model.setMaximize() for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u]", "all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] +", "= _get_edge_variable(i, k, edge_variables) x_kj = _get_edge_variable(k, j, edge_variables) model.addCons(x_ij <= x_ik +", "if graph.has_edge(u, v): weight = graph.get_edge_data(u, v)[\"weight\"] else: weight = 0 edge_variables[edge_name] =", "v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables = {}", "- edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model def triangle(graph): model =", "2 ) model.addCons( -node_variables[u] - node_variables[v] + edge_variables[edge_name] <= 0 ) if not", "= model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if weight < 0: all_non_negative", "x_ik + 
x_kj) model.addCons(x_ij + x_ik + x_kj <= 2) return edge_variables, model", "edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] + node_variables[v] + edge_variables[edge_name] <= 2 )", "model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j, k in", "v in itertools.combinations(graph.nodes(), 2): edge_name = naming.undirected_edge_name(u, v) if graph.has_edge(u, v): weight =", "for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) model.addCons( node_variables[u] +", "x_kj) model.addCons(x_ij + x_ik + x_kj <= 2) return edge_variables, model def _get_edge_variable(u,", "as scip import geco.mips.utilities.naming as naming def naive(graph): model = scip.Model(\"Naive MaxCut\") node_variables", "weight = d[\"weight\"] edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if", "for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v) weight = d[\"weight\"]", "weight = 0 edge_variables[edge_name] = model.addVar( lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) model.setMaximize()", "- node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name]", "node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v] - edge_variables[edge_name] <=", ") return (node_variables, edge_variables), model def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables =", "edge_variables), model def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables = {} for u,", "k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, j, edge_variables) x_ik = _get_edge_variable(i, k,", "lb=0, ub=1, obj=weight, name=edge_name, vtype=\"B\" ) if weight < 0: all_non_negative = False", "j, k in itertools.combinations(graph.nodes(), 3): x_ij = _get_edge_variable(i, 
j, edge_variables) x_ik = _get_edge_variable(i,", "{} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1, obj=0, name=str(v), vtype=\"B\") edge_variables", "name=edge_name, vtype=\"B\" ) model.setMaximize() for i, j, k in itertools.combinations(graph.nodes(), 3): x_ij =", "return (node_variables, edge_variables), model def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables = {}", "+ node_variables[v] - edge_variables[edge_name] <= 0 ) return (node_variables, edge_variables), model def triangle(graph):", "0 ) if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0", "- node_variables[v] + edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons( node_variables[u] -", "scip.Model(\"Naive MaxCut\") node_variables = {} for v in graph.nodes(): node_variables[v] = model.addVar(lb=0, ub=1,", "= False model.setMaximize() for u, v, d in graph.edges(data=True): edge_name = naming.undirected_edge_name(u, v)", "model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name] <= 0 ) model.addCons( -node_variables[u] + node_variables[v]", "+ edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] -", "x_kj <= 2) return edge_variables, model def _get_edge_variable(u, v, edge_variables): edge_name = naming.undirected_edge_name(u,", "edge_variables[edge_name] <= 0 ) if not all_non_negative: model.addCons( node_variables[u] - node_variables[v] - edge_variables[edge_name]", "def triangle(graph): model = scip.Model(\"Triangle MaxCut\") edge_variables = {} for u, v in" ]
[ "open(templ.get_file_name(), 'r') # Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras']", "imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc = tex.parse()", "= string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging,", "import logging import subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from", "<reponame>jsannemo/problemtools #! /usr/bin/env python3 # -*- coding: utf-8 -*- import re import os.path", "tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer", "source...') doc = tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if not options.quiet:", "/usr/bin/env python3 # -*- coding: utf-8 -*- import re import os.path import string", "def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir", "import ProblemsetMacros from . 
import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir", "template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') # Setup parser and", "= problem # Set up template if necessary with template.Template(problem, language=options.language, title=options.title) as", "problem # Set up template if necessary with template.Template(problem, language=options.language, title=options.title) as templ:", "tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc", "imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile =", "# Set up template if necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile", "language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') # Setup parser and renderer", "import string import argparse import logging import subprocess import plasTeX.TeX import plasTeX.Logging from", "doc = tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!')", "have not figured out any way of stopping the plasTeX # renderer from", "tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url']", "Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have", "import argparse import logging 
import subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import", "tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing", "template if necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r')", "from . import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase)", "= False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not", "options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)'", "not options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close() # Go to destdir", "with template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') # Setup parser", "= plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename']", "parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not", "figured out any way of stopping the plasTeX # renderer from generating a", "= True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] =", "import 
os.path import string import argparse import logging import subprocess import plasTeX.TeX import", "tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) #", "= options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] =", "not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not figured out any way", "options.loglevel.upper())) texfile = problem # Set up template if necessary with template.Template(problem, language=options.language,", "import re import os.path import string import argparse import logging import subprocess import", ".ProblemPlasTeX import ProblemsetMacros from . import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0]", "= open(templ.get_file_name(), 'r') # Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex)", "False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not options.quiet:", "Annoying: I have not figured out any way of stopping the plasTeX #", "title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') # Setup parser and renderer etc", "coding: utf-8 -*- import re import os.path import string import argparse import logging", "not figured out any way of stopping the plasTeX # renderer from generating", "plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up template if necessary", "= tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc)", "'img-$num(4)' tex.ownerDocument.config['images']['enabled'] 
= False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer()", "= destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] =", "ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . import template def convert(problem, options=None): problembase", "template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase)", "= string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper()))", "python3 # -*- coding: utf-8 -*- import re import os.path import string import", "subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros", "etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] =", "ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close() # Go", "options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close() # Go to destdir os.chdir(destdir)", "re import os.path import string import argparse import logging import subprocess import plasTeX.TeX", "ProblemsetMacros from . 
import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir =", "'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX source...')", "texfile.close() # Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying:", "'r') # Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] =", "options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not figured out any way of", "destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir", "= 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX", "tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True", "os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not figured out", "if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set", "= os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet:", "ProblemsetMacros.init(tex) 
tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile", "convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir =", "I have not figured out any way of stopping the plasTeX # renderer", "destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else:", "plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up template if necessary with template.Template(problem,", "tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames']", "plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] =", "renderer.render(doc) # Annoying: I have not figured out any way of stopping the", "Set up template if necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile =", "from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . 
import template def", "options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up template if necessary with", "templ: texfile = open(templ.get_file_name(), 'r') # Setup parser and renderer etc tex =", "if not options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close() # Go to", "plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . import template", "-*- import re import os.path import string import argparse import logging import subprocess", "string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper()))", "string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile", "options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase)", "if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not figured out any", "# Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css", "any way of 
stopping the plasTeX # renderer from generating a .paux file", "#! /usr/bin/env python3 # -*- coding: utf-8 -*- import re import os.path import", "= imgbasedir renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc =", "import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . import template def convert(problem, options=None):", "problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if", "-*- coding: utf-8 -*- import re import os.path import string import argparse import", ". import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile", "import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from", "destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging,", "Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if", "print('Parsing TeX source...') doc = tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if", "string import argparse import logging import subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX", "argparse import logging import subprocess import plasTeX.TeX import plasTeX.Logging from 
.ProblemPlasTeX import ProblemRenderer", "from .ProblemPlasTeX import ProblemsetMacros from . import template def convert(problem, options=None): problembase =", "options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False", "# -*- coding: utf-8 -*- import re import os.path import string import argparse", ".ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . import template def convert(problem,", "os.path.splitext(os.path.basename(problem))[0] destdir = string.Template(options.destdir).safe_substitute(problem=problembase) destfile = string.Template(options.destfile).safe_substitute(problem=problembase) imgbasedir = string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging()", "tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer = ProblemRenderer() if", "tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager']", "= 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none' tex.ownerDocument.config['images']['base-url'] = imgbasedir renderer =", "utf-8 -*- import re import os.path import string import argparse import logging import", "string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) 
plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem #", "= string.Template(options.imgbasedir).safe_substitute(problem=problembase) if options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem", "import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from . import", "as templ: texfile = open(templ.get_file_name(), 'r') # Setup parser and renderer etc tex", "up template if necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(),", "# Annoying: I have not figured out any way of stopping the plasTeX", "texfile = open(templ.get_file_name(), 'r') # Setup parser and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile)", "to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not", "texfile = problem # Set up template if necessary with template.Template(problem, language=options.language, title=options.title)", "not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] =", "= ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close() #", "import subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import", "destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I have not figured", "renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = 
options.css if not options.headers: tex.ownerDocument.userdata['noheaders']", "out any way of stopping the plasTeX # renderer from generating a .paux", "os.path import string import argparse import logging import subprocess import plasTeX.TeX import plasTeX.Logging", "True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled'] = False tex.ownerDocument.config['images']['imager'] = 'none'", "and renderer etc tex = plasTeX.TeX.TeX(myfile=texfile) ProblemsetMacros.init(tex) tex.ownerDocument.config['general']['copy-theme-extras'] = options.css if not options.headers:", "stopping the plasTeX # renderer from generating a .paux file if os.path.isfile('.paux'): os.remove('.paux')", "if not options.headers: tex.ownerDocument.userdata['noheaders'] = True tex.ownerDocument.config['files']['filename'] = destfile tex.ownerDocument.config['images']['filenames'] = 'img-$num(4)' tex.ownerDocument.config['images']['enabled']", "necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') # Setup", "options.quiet: plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up", "print('Rendering!') renderer.render(doc) # Annoying: I have not figured out any way of stopping", "plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX import ProblemsetMacros from .", "logging import subprocess import plasTeX.TeX import plasTeX.Logging from .ProblemPlasTeX import ProblemRenderer from .ProblemPlasTeX", "import template def convert(problem, options=None): problembase = os.path.splitext(os.path.basename(problem))[0] destdir = 
string.Template(options.destdir).safe_substitute(problem=problembase) destfile =", "# Go to destdir os.chdir(destdir) if not options.quiet: print('Rendering!') renderer.render(doc) # Annoying: I", "plasTeX.Logging.disableLogging() else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up template", "way of stopping the plasTeX # renderer from generating a .paux file if", "of stopping the plasTeX # renderer from generating a .paux file if os.path.isfile('.paux'):", "else: plasTeX.Logging.getLogger().setLevel(getattr(logging, options.loglevel.upper())) plasTeX.Logging.getLogger('status').setLevel(getattr(logging, options.loglevel.upper())) texfile = problem # Set up template if", "if necessary with template.Template(problem, language=options.language, title=options.title) as templ: texfile = open(templ.get_file_name(), 'r') #", "renderer = ProblemRenderer() if not options.quiet: print('Parsing TeX source...') doc = tex.parse() texfile.close()", "TeX source...') doc = tex.parse() texfile.close() # Go to destdir os.chdir(destdir) if not" ]
[ "self.you else: return self.me def pack_state(self, turn): if self.me == 1: return self.mine,", "\", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move)", "int(message[0]) if (turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1 =", "time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4 self.mine = 0", "init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me) print((sys.stderr, 'starting", "board import Board INF = 1.0e100 CORNERS = [(0, 0), (0, 7), (7,", "self.corners_bits = sum(self.board.spaces[i] for i in CORNERS) self.mine = 0 self.foe = 0", "1: self.board = Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine) return turn #", "moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn))", "move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): if a_player", "7), (6, 0), (6, 1), (6, 6), (6, 7), (7, 1), (7, 6)]", "player=None): \"\"\" state is: (p1_placed, p2_placed, whose_turn) \"\"\" if player is None: player", "return self.you else: return self.me def pack_state(self, turn): if self.me == 1: return", "j in range(8): color = int(message[count]) if color == self.me: self.mine += self.board.spaces[(i,", "CORNERS) self.mine = 0 self.foe = 0 def get_valid_moves(self, state, player=None): \"\"\" state", "< 4: centers_remaning_bits = self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player", "(0, -1), (0, 1), (1, -1), (1, 0), (1, 1)] class Player(object): def", "== self.you: self.foe += self.board.spaces[(i, j)] count += 1 # update board if", "= float(message[3]) print(\"turn\", turn) print(\"current time:\", 
time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2)", "= me, you self.round = 0 # handling the board self.board = Board()", "i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in CORNERS) self.mine = 0", "-1), (1, 0), (1, 1)] class Player(object): def __init__(self, me, you): self.me, self.you", "(0, 3), (0, 4), (0, 5), (2, 0), (3, 0), (4, 0), (5,", "print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg =", "0), (7, 7)] CENTERS = [(3, 3), (3, 4), (4, 3), (4, 4)]", "valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move:", "+= self.board.spaces[(i, j)] count += 1 # update board if self.me == 1:", "turn == self.me: print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves: \", valid_moves) print(\"mine:", "pack_state(self, turn): if self.me == 1: return self.mine, self.foe, turn else: return self.foe,", "= sum(self.board.spaces[i] for i in CORNERS) self.mine = 0 self.foe = 0 def", "self.you: self.foe += self.board.spaces[(i, j)] count += 1 # update board if self.me", "(4, 0), (5, 0), (2, 7), (3, 7), (4, 7), (5, 7), (7,", "turn): if self.me == 1: return self.mine, self.foe, turn else: return self.foe, self.mine,", "color == self.you: self.foe += self.board.spaces[(i, j)] count += 1 # update board", "# print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move", "read_message(sock) if turn == self.me: print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves: \",", "(turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1 = float(message[2]) self.t2", "socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me) print((sys.stderr, 'starting up on %s", "self.centers_bits = 
sum(self.board.spaces[i] for i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in", "i in CORNERS) self.mine = 0 self.foe = 0 def get_valid_moves(self, state, player=None):", "message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn == -999): time.sleep(1) self.save_tree() sys.exit()", "self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1])", "# update board if self.me == 1: self.board = Board(self.mine, self.foe) else: self.board", "(2, 7), (3, 7), (4, 7), (5, 7), (7, 2), (7, 3), (7,", "\"\"\" if player is None: player = state[2] if self.round < 4: centers_remaning_bits", "state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0], state[1]) else:", "import Board INF = 1.0e100 CORNERS = [(0, 0), (0, 7), (7, 0),", "ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind, thing)) return sock", "Board INF = 1.0e100 CORNERS = [(0, 0), (0, 7), (7, 0), (7,", "(6, 0), (6, 1), (6, 6), (6, 7), (7, 1), (7, 6)] G_EDGES", "def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me) print((sys.stderr,", "self.mine) print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg", "sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me) print((sys.stderr, 'starting up", "self.round) # print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board)", "(6, 7), (7, 1), (7, 6)] G_EDGES = [(0, 2), (0, 3), (0,", "+ self.me) print((sys.stderr, 'starting up on %s port ', server_address)) sock.connect(server_address) for ind,", "[(0, 1), (0, 6), (1, 0), (1, 1), (1, 6), (1, 7), (6,", "'starting up on %s port ', 
server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")):", "7), (7, 0), (7, 7)] CENTERS = [(3, 3), (3, 4), (4, 3),", "CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in CORNERS) self.mine = 0 self.foe =", "(7, 6)] G_EDGES = [(0, 2), (0, 3), (0, 4), (0, 5), (2,", "self.board.spaces[(i, j)] count += 1 # update board if self.me == 1: self.board", "{}\".format(ind, thing)) return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if", "state[2] if self.round < 4: centers_remaning_bits = self.centers_bits - state[0] - state[1] return", "player = state[2] if self.round < 4: centers_remaning_bits = self.centers_bits - state[0] -", "3), (7, 4), (7, 5)] NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1),", "def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn == -999): time.sleep(1)", "(hostname, 3333 + self.me) print((sys.stderr, 'starting up on %s port ', server_address)) sock.connect(server_address)", "read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn == -999): time.sleep(1) self.save_tree()", "Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine) return turn # create a random", "= state[2] if self.round < 4: centers_remaning_bits = self.centers_bits - state[0] - state[1]", "print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): if", "self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\",", "a random number generator sock = init_client(hostname) while True: turn = read_message(sock) if", "sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn == -999):", "Player(object): def __init__(self, me, you): 
self.me, self.you = me, you self.round = 0", "(7, 3), (7, 4), (7, 5)] NEIGHBORS = [(-1, -1), (-1, 0), (-1,", "the board self.board = Board() self.centers_bits = sum(self.board.spaces[i] for i in CENTERS) self.corners_bits", "[(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1,", "6), (1, 0), (1, 1), (1, 6), (1, 7), (6, 0), (6, 1),", "self.round = 0 # handling the board self.board = Board() self.centers_bits = sum(self.board.spaces[i]", "self.me def pack_state(self, turn): if self.me == 1: return self.mine, self.foe, turn else:", "NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1,", "whose_turn) \"\"\" if player is None: player = state[2] if self.round < 4:", "7), (4, 7), (5, 7), (7, 2), (7, 3), (7, 4), (7, 5)]", "{} and {}\".format(ind, thing)) return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn =", "0), (3, 0), (4, 0), (5, 0), (2, 7), (3, 7), (4, 7),", "float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1)", "= [(0, 2), (0, 3), (0, 4), (0, 5), (2, 0), (3, 0),", "print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4 self.mine = 0 self.foe = 0", "self.t2) count = 4 self.mine = 0 self.foe = 0 for i in", "3333 + self.me) print((sys.stderr, 'starting up on %s port ', server_address)) sock.connect(server_address) for", "if player is None: player = state[2] if self.round < 4: centers_remaning_bits =", "7)] CENTERS = [(3, 3), (3, 4), (4, 3), (4, 4)] DANGERS =", "(7, 0), (7, 7)] CENTERS = [(3, 3), (3, 4), (4, 3), (4,", "in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in CORNERS) self.mine = 0 self.foe", "== 1: return self.mine, self.foe, turn else: return self.foe, self.mine, turn def save_tree(self):", "self.mine = 0 self.foe = 0 for i in range(8): for j in", "(1, 1), (1, 6), (1, 7), (6, 0), (6, 1), (6, 6), (6,", "get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed, 
p2_placed, whose_turn) \"\"\" if player is", "sock.send(msg.encode()) def other_player(self, a_player): if a_player == self.me: return self.you else: return self.me", "self.me: return self.you else: return self.me def pack_state(self, turn): if self.me == 1:", "-1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0),", "\", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): if a_player ==", "and {}\".format(ind, thing)) return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0])", "you): self.me, self.you = me, you self.round = 0 # handling the board", "(4, 3), (4, 4)] DANGERS = [(0, 1), (0, 6), (1, 0), (1,", "got {} and {}\".format(ind, thing)) return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn", "self.round < 4: centers_remaning_bits = self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if", "(0, 7), (7, 0), (7, 7)] CENTERS = [(3, 3), (3, 4), (4,", "float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count", "2), (7, 3), (7, 4), (7, 5)] NEIGHBORS = [(-1, -1), (-1, 0),", "0), (6, 1), (6, 6), (6, 7), (7, 1), (7, 6)] G_EDGES =", "import time import socket import sys from board import Board INF = 1.0e100", "print(\"Round: \", self.round) # print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \",", "random number generator sock = init_client(hostname) while True: turn = read_message(sock) if turn", "my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): if a_player == self.me:", "enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind, thing)) return sock def read_message(sock): message", "import sys from board import Board INF = 1.0e100 
CORNERS = [(0, 0),", "(5, 0), (2, 7), (3, 7), (4, 7), (5, 7), (7, 2), (7,", "in CORNERS) self.mine = 0 self.foe = 0 def get_valid_moves(self, state, player=None): \"\"\"", "= [(3, 3), (3, 4), (4, 3), (4, 4)] DANGERS = [(0, 1),", "in range(8): color = int(message[count]) if color == self.me: self.mine += self.board.spaces[(i, j)]", "int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\",", "sys.exit() self.round = int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current", "__init__(self, me, you): self.me, self.you = me, you self.round = 0 # handling", "generator sock = init_client(hostname) while True: turn = read_message(sock) if turn == self.me:", "self.mine += self.board.spaces[(i, j)] elif color == self.you: self.foe += self.board.spaces[(i, j)] count", "server_address = (hostname, 3333 + self.me) print((sys.stderr, 'starting up on %s port ',", "(0, 5), (2, 0), (3, 0), (4, 0), (5, 0), (2, 7), (3,", "1), (1, 6), (1, 7), (6, 0), (6, 1), (6, 6), (6, 7),", "j)] elif color == self.you: self.foe += self.board.spaces[(i, j)] count += 1 #", "centers_remaning_bits = self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1:", "= 0 self.foe = 0 for i in range(8): for j in range(8):", "socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me) print((sys.stderr, 'starting up on %s port", "= sum(self.board.spaces[i] for i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in CORNERS)", "print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4 self.mine", "= float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\",", "6), (6, 7), (7, 1), (7, 6)] G_EDGES = [(0, 2), (0, 3),", "import socket import sys 
from board import Board INF = 1.0e100 CORNERS =", "sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round =", "self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0],", "time import socket import sys from board import Board INF = 1.0e100 CORNERS", "print(\"when init got {} and {}\".format(ind, thing)) return sock def read_message(sock): message =", "== 1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname): self.load_tree()", "(5, 7), (7, 2), (7, 3), (7, 4), (7, 5)] NEIGHBORS = [(-1,", "0), (0, 7), (7, 0), (7, 7)] CENTERS = [(3, 3), (3, 4),", "self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\", self.round)", "else: return self.me def pack_state(self, turn): if self.me == 1: return self.mine, self.foe,", "print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move =", "time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4 self.mine =", "count = 4 self.mine = 0 self.foe = 0 for i in range(8):", "board if self.me == 1: self.board = Board(self.mine, self.foe) else: self.board = Board(self.foe,", "state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1],", "if self.round < 4: centers_remaning_bits = self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits)", "init_client(hostname) while True: turn = read_message(sock) if turn == self.me: print(\"============\") print(\"Round: \",", "\"\"\" state is: (p1_placed, p2_placed, whose_turn) \"\"\" if player is 
None: player =", "def get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed, p2_placed, whose_turn) \"\"\" if player", "(6, 6), (6, 7), (7, 1), (7, 6)] G_EDGES = [(0, 2), (0,", "= int(message[0]) if (turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1", "(0, 6), (1, 0), (1, 1), (1, 6), (1, 7), (6, 0), (6,", "1)] class Player(object): def __init__(self, me, you): self.me, self.you = me, you self.round", "== self.me: self.mine += self.board.spaces[(i, j)] elif color == self.you: self.foe += self.board.spaces[(i,", "return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname): self.load_tree() def init_client(hostname):", "return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0])", "if player == 1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def play_game(self,", "state is: (p1_placed, p2_placed, whose_turn) \"\"\" if player is None: player = state[2]", "= 0 # handling the board self.board = Board() self.centers_bits = sum(self.board.spaces[i] for", "= 0 def get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed, p2_placed, whose_turn) \"\"\"", "4)] DANGERS = [(0, 1), (0, 6), (1, 0), (1, 1), (1, 6),", "a_player == self.me: return self.you else: return self.me def pack_state(self, turn): if self.me", "0), (1, 1), (1, 6), (1, 7), (6, 0), (6, 1), (6, 6),", "4), (4, 3), (4, 4)] DANGERS = [(0, 1), (0, 6), (1, 0),", "= self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return", "print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE:", "turn = read_message(sock) if turn == self.me: 
print(\"============\") print(\"Round: \", self.round) # print(\"Valid", "else: return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET,", "= int(message[count]) if color == self.me: self.mine += self.board.spaces[(i, j)] elif color ==", "range(8): for j in range(8): color = int(message[count]) if color == self.me: self.mine", "CENTERS = [(3, 3), (3, 4), (4, 3), (4, 4)] DANGERS = [(0,", "my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): if a_player == self.me: return self.you else: return", "if a_player == self.me: return self.you else: return self.me def pack_state(self, turn): if", "player is None: player = state[2] if self.round < 4: centers_remaning_bits = self.centers_bits", "port ', server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {}", "def pack_state(self, turn): if self.me == 1: return self.mine, self.foe, turn else: return", "print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode())", "self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def", "if color == self.me: self.mine += self.board.spaces[(i, j)] elif color == self.you: self.foe", "board self.board = Board() self.centers_bits = sum(self.board.spaces[i] for i in CENTERS) self.corners_bits =", "state[1]) else: return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname): self.load_tree() def init_client(hostname): sock =", "i in range(8): for j in range(8): color = int(message[count]) if color ==", "return turn # create a random number generator sock = init_client(hostname) while True:", "handling the board self.board = Board() 
self.centers_bits = sum(self.board.spaces[i] for i in CENTERS)", "G_EDGES = [(0, 2), (0, 3), (0, 4), (0, 5), (2, 0), (3,", "print(\"t2:\", self.t2) count = 4 self.mine = 0 self.foe = 0 for i", "4: centers_remaning_bits = self.centers_bits - state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player ==", "self.me == 1: return self.mine, self.foe, turn else: return self.foe, self.mine, turn def", "is None: player = state[2] if self.round < 4: centers_remaning_bits = self.centers_bits -", "(3, 4), (4, 3), (4, 4)] DANGERS = [(0, 1), (0, 6), (1,", "def __init__(self, me, you): self.me, self.you = me, you self.round = 0 #", "= [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1),", "print((sys.stderr, 'starting up on %s port ', server_address)) sock.connect(server_address) for ind, thing in", "self.foe, turn else: return self.foe, self.mine, turn def save_tree(self): pass def load_tree(self): pass", "self.me == 1: self.board = Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine) return", "self.board = Board() self.centers_bits = sum(self.board.spaces[i] for i in CENTERS) self.corners_bits = sum(self.board.spaces[i]", "= Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine) return turn # create a", "thing)) return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn", "(1, 0), (1, 1)] class Player(object): def __init__(self, me, you): self.me, self.you =", "0 self.foe = 0 for i in range(8): for j in range(8): color", "turn = int(message[0]) if (turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1])", "def other_player(self, a_player): if a_player == self.me: return self.you else: return self.me def", "self.me) print((sys.stderr, 'starting up on %s port ', server_address)) sock.connect(server_address) for ind, thing", "other_player(self, a_player): if a_player == self.me: return self.you else: return self.me def 
pack_state(self,", "number generator sock = init_client(hostname) while True: turn = read_message(sock) if turn ==", "def play_game(self, hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname,", "me, you self.round = 0 # handling the board self.board = Board() self.centers_bits", "CORNERS = [(0, 0), (0, 7), (7, 0), (7, 7)] CENTERS = [(3,", "self.mine, self.foe, turn else: return self.foe, self.mine, turn def save_tree(self): pass def load_tree(self):", "# create a random number generator sock = init_client(hostname) while True: turn =", "True: turn = read_message(sock) if turn == self.me: print(\"============\") print(\"Round: \", self.round) #", "turn # create a random number generator sock = init_client(hostname) while True: turn", "7), (5, 7), (7, 2), (7, 3), (7, 4), (7, 5)] NEIGHBORS =", "-1), (0, 1), (1, -1), (1, 0), (1, 1)] class Player(object): def __init__(self,", "Board() self.centers_bits = sum(self.board.spaces[i] for i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i", "self.board = Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine) return turn # create", "(7, 1), (7, 6)] G_EDGES = [(0, 2), (0, 3), (0, 4), (0,", "return self.me def pack_state(self, turn): if self.me == 1: return self.mine, self.foe, turn", "0), (1, 1)] class Player(object): def __init__(self, me, you): self.me, self.you = me,", "self.t1) print(\"t2:\", self.t2) count = 4 self.mine = 0 self.foe = 0 for", "= sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round", "elif color == self.you: self.foe += self.board.spaces[(i, j)] count += 1 # update", "sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind, thing))", "msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def 
other_player(self, a_player): if a_player == self.me: return", "self.mine) return turn # create a random number generator sock = init_client(hostname) while", "sum(self.board.spaces[i] for i in CORNERS) self.mine = 0 self.foe = 0 def get_valid_moves(self,", "1), (6, 6), (6, 7), (7, 1), (7, 6)] G_EDGES = [(0, 2),", "== -999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1 = float(message[2]) self.t2 =", "(1, 7), (6, 0), (6, 1), (6, 6), (6, 7), (7, 1), (7,", "for i in CORNERS) self.mine = 0 self.foe = 0 def get_valid_moves(self, state,", "\", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0],", "1: return self.mine, self.foe, turn else: return self.foe, self.mine, turn def save_tree(self): pass", "self.save_tree() sys.exit() self.round = int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn)", "= int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\", time.time())", "(-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1,", "5)] NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1),", "my_move = self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def", "== self.me: return self.you else: return self.me def pack_state(self, turn): if self.me ==", "%s port ', server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got", "print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My move: \",", "print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4 self.mine = 0 self.foe", "self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def 
play_game(self, hostname): self.load_tree() def init_client(hostname): sock", "(7, 7)] CENTERS = [(3, 3), (3, 4), (4, 3), (4, 4)] DANGERS", "6)] G_EDGES = [(0, 2), (0, 3), (0, 4), (0, 5), (2, 0),", "hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 +", "(3, 7), (4, 7), (5, 7), (7, 2), (7, 3), (7, 4), (7,", "socket import sys from board import Board INF = 1.0e100 CORNERS = [(0,", "(1, 6), (1, 7), (6, 0), (6, 1), (6, 6), (6, 7), (7,", "\", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe) print(self.board) my_move = self.move(self.pack_state(turn)) print(\"My", "0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)]", "player == 1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname):", "self.foe = 0 def get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed, p2_placed, whose_turn)", "self.round = int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\", turn) print(\"current time:\",", "1.0e100 CORNERS = [(0, 0), (0, 7), (7, 0), (7, 7)] CENTERS =", "- state[0] - state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0], state[1])", "if self.me == 1: return self.mine, self.foe, turn else: return self.foe, self.mine, turn", "self.board.spaces[(i, j)] elif color == self.you: self.foe += self.board.spaces[(i, j)] count += 1", "(-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)] class", "(3, 0), (4, 0), (5, 0), (2, 7), (3, 7), (4, 7), (5,", "int(message[count]) if color == self.me: self.mine += self.board.spaces[(i, j)] elif color == self.you:", "2), (0, 3), (0, 4), (0, 5), (2, 0), (3, 0), (4, 0),", "3), (4, 4)] DANGERS = [(0, 1), (0, 6), (1, 0), (1, 1),", "# handling the board self.board = Board() self.centers_bits = sum(self.board.spaces[i] for i in", 
"Board(self.foe, self.mine) return turn # create a random number generator sock = init_client(hostname)", "me, you): self.me, self.you = me, you self.round = 0 # handling the", "self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player):", "for i in range(8): for j in range(8): color = int(message[count]) if color", "sock = init_client(hostname) while True: turn = read_message(sock) if turn == self.me: print(\"============\")", "for j in range(8): color = int(message[count]) if color == self.me: self.mine +=", "DANGERS = [(0, 1), (0, 6), (1, 0), (1, 1), (1, 6), (1,", "7), (7, 2), (7, 3), (7, 4), (7, 5)] NEIGHBORS = [(-1, -1),", "= Board(self.foe, self.mine) return turn # create a random number generator sock =", "create a random number generator sock = init_client(hostname) while True: turn = read_message(sock)", "if turn == self.me: print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves: \", valid_moves)", "', server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and", "state, player=None): \"\"\" state is: (p1_placed, p2_placed, whose_turn) \"\"\" if player is None:", "\", self.round) # print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine) print(\"FOE: \", self.foe)", "4), (0, 5), (2, 0), (3, 0), (4, 0), (5, 0), (2, 7),", "= 0 for i in range(8): for j in range(8): color = int(message[count])", "4 self.mine = 0 self.foe = 0 for i in range(8): for j", "print(\"turn\", turn) print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count =", "1), (0, 6), (1, 0), (1, 1), (1, 6), (1, 7), (6, 0),", "up on %s port ', server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when", "self.board.legal_actions(state[1], 
state[0]) def play_game(self, hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address", "= [(0, 0), (0, 7), (7, 0), (7, 7)] CENTERS = [(3, 3),", "(1, -1), (1, 0), (1, 1)] class Player(object): def __init__(self, me, you): self.me,", "on %s port ', server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init", "(0, 4), (0, 5), (2, 0), (3, 0), (4, 0), (5, 0), (2,", "color == self.me: self.mine += self.board.spaces[(i, j)] elif color == self.you: self.foe +=", "-999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3])", "self.foe = 0 for i in range(8): for j in range(8): color =", "if (turn == -999): time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1 = float(message[2])", "(6, 1), (6, 6), (6, 7), (7, 1), (7, 6)] G_EDGES = [(0,", "[(0, 0), (0, 7), (7, 0), (7, 7)] CENTERS = [(3, 3), (3,", "self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me)", "a_player): if a_player == self.me: return self.you else: return self.me def pack_state(self, turn):", "thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind, thing)) return sock def", "= Board() self.centers_bits = sum(self.board.spaces[i] for i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for", "else: self.board = Board(self.foe, self.mine) return turn # create a random number generator", "(0, 1), (1, -1), (1, 0), (1, 1)] class Player(object): def __init__(self, me,", "self.you = me, you self.round = 0 # handling the board self.board =", "(1, 1)] class Player(object): def __init__(self, me, you): self.me, self.you = me, you", "= \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): 
if a_player == self.me: return self.you", "self.me: self.mine += self.board.spaces[(i, j)] elif color == self.you: self.foe += self.board.spaces[(i, j)]", "7), (7, 1), (7, 6)] G_EDGES = [(0, 2), (0, 3), (0, 4),", "self.me: print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves: \", valid_moves) print(\"mine: \", self.mine)", "(4, 4)] DANGERS = [(0, 1), (0, 6), (1, 0), (1, 1), (1,", "3), (3, 4), (4, 3), (4, 4)] DANGERS = [(0, 1), (0, 6),", "in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind, thing)) return sock def read_message(sock):", "self.mine = 0 self.foe = 0 def get_valid_moves(self, state, player=None): \"\"\" state is:", "class Player(object): def __init__(self, me, you): self.me, self.you = me, you self.round =", "return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\") turn = int(message[0]) if (turn ==", "- state[1] return self.board.bits_to_tuples(centers_remaning_bits) if player == 1: return self.board.legal_actions(state[0], state[1]) else: return", "is: (p1_placed, p2_placed, whose_turn) \"\"\" if player is None: player = state[2] if", "= socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333 + self.me) print((sys.stderr, 'starting up on", "6), (1, 7), (6, 0), (6, 1), (6, 6), (6, 7), (7, 1),", "0), (5, 0), (2, 7), (3, 7), (4, 7), (5, 7), (7, 2),", "1), (1, -1), (1, 0), (1, 1)] class Player(object): def __init__(self, me, you):", "time.sleep(1) self.save_tree() sys.exit() self.round = int(message[1]) self.t1 = float(message[2]) self.t2 = float(message[3]) print(\"turn\",", "0), (2, 7), (3, 7), (4, 7), (5, 7), (7, 2), (7, 3),", "[(3, 3), (3, 4), (4, 3), (4, 4)] DANGERS = [(0, 1), (0,", "= (hostname, 3333 + self.me) print((sys.stderr, 'starting up on %s port ', server_address))", "for i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in CORNERS) self.mine =", "sys from board import Board INF = 1.0e100 CORNERS = 
[(0, 0), (0,", "\"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self, a_player): if a_player == self.me: return self.you else:", "1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)] class Player(object):", "0 self.foe = 0 def get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed, p2_placed,", "= 1.0e100 CORNERS = [(0, 0), (0, 7), (7, 0), (7, 7)] CENTERS", "+= 1 # update board if self.me == 1: self.board = Board(self.mine, self.foe)", "= 0 self.foe = 0 def get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed,", "(7, 5)] NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0,", "in range(8): for j in range(8): color = int(message[count]) if color == self.me:", "self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4 self.mine = 0 self.foe =", "7), (3, 7), (4, 7), (5, 7), (7, 2), (7, 3), (7, 4),", "0 def get_valid_moves(self, state, player=None): \"\"\" state is: (p1_placed, p2_placed, whose_turn) \"\"\" if", "you self.round = 0 # handling the board self.board = Board() self.centers_bits =", "1 # update board if self.me == 1: self.board = Board(self.mine, self.foe) else:", "color = int(message[count]) if color == self.me: self.mine += self.board.spaces[(i, j)] elif color", "(7, 4), (7, 5)] NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1), (0,", "= init_client(hostname) while True: turn = read_message(sock) if turn == self.me: print(\"============\") print(\"Round:", "0), (4, 0), (5, 0), (2, 7), (3, 7), (4, 7), (5, 7),", "4), (7, 5)] NEIGHBORS = [(-1, -1), (-1, 0), (-1, 1), (0, -1),", "1: return self.board.legal_actions(state[0], state[1]) else: return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname): self.load_tree() def", "j)] count += 1 # update board if self.me == 1: self.board =", "from board import Board INF = 1.0e100 CORNERS = [(0, 0), (0, 7),", "= [(0, 1), (0, 6), (1, 0), (1, 1), (1, 6), (1, 7),", "return self.mine, self.foe, turn else: return self.foe, self.mine, 
turn def save_tree(self): pass def", "turn) print(\"current time:\", time.time()) print(\"round:\", self.round) print(\"t1:\", self.t1) print(\"t2:\", self.t2) count = 4", "self.board = Board(self.foe, self.mine) return turn # create a random number generator sock", "sum(self.board.spaces[i] for i in CENTERS) self.corners_bits = sum(self.board.spaces[i] for i in CORNERS) self.mine", "= read_message(sock) if turn == self.me: print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves:", "+= self.board.spaces[(i, j)] elif color == self.you: self.foe += self.board.spaces[(i, j)] count +=", "range(8): color = int(message[count]) if color == self.me: self.mine += self.board.spaces[(i, j)] elif", "[(0, 2), (0, 3), (0, 4), (0, 5), (2, 0), (3, 0), (4,", "init got {} and {}\".format(ind, thing)) return sock def read_message(sock): message = sock.recv(1024).decode().split(\"\\n\")", "while True: turn = read_message(sock) if turn == self.me: print(\"============\") print(\"Round: \", self.round)", "(7, 2), (7, 3), (7, 4), (7, 5)] NEIGHBORS = [(-1, -1), (-1,", "(1, 0), (1, 1), (1, 6), (1, 7), (6, 0), (6, 1), (6,", "update board if self.me == 1: self.board = Board(self.mine, self.foe) else: self.board =", "server_address)) sock.connect(server_address) for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind,", "play_game(self, hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address = (hostname, 3333", "== self.me: print(\"============\") print(\"Round: \", self.round) # print(\"Valid moves: \", valid_moves) print(\"mine: \",", "count += 1 # update board if self.me == 1: self.board = Board(self.mine,", "3), (0, 4), (0, 5), (2, 0), (3, 0), (4, 0), (5, 0),", "= 4 self.mine = 0 self.foe = 0 for i in range(8): for", "(p1_placed, p2_placed, whose_turn) \"\"\" if player is None: player = state[2] if self.round", "1), (7, 6)] G_EDGES = [(0, 
2), (0, 3), (0, 4), (0, 5),", "return self.board.legal_actions(state[1], state[0]) def play_game(self, hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)", "0 for i in range(8): for j in range(8): color = int(message[count]) if", "== 1: self.board = Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine) return turn", "None: player = state[2] if self.round < 4: centers_remaning_bits = self.centers_bits - state[0]", "state[0]) def play_game(self, hostname): self.load_tree() def init_client(hostname): sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_address =", "5), (2, 0), (3, 0), (4, 0), (5, 0), (2, 7), (3, 7),", "self.foe += self.board.spaces[(i, j)] count += 1 # update board if self.me ==", "if self.me == 1: self.board = Board(self.mine, self.foe) else: self.board = Board(self.foe, self.mine)", "INF = 1.0e100 CORNERS = [(0, 0), (0, 7), (7, 0), (7, 7)]", "self.me, self.you = me, you self.round = 0 # handling the board self.board", "(2, 0), (3, 0), (4, 0), (5, 0), (2, 7), (3, 7), (4,", "(4, 7), (5, 7), (7, 2), (7, 3), (7, 4), (7, 5)] NEIGHBORS", "0 # handling the board self.board = Board() self.centers_bits = sum(self.board.spaces[i] for i", "self.foe) else: self.board = Board(self.foe, self.mine) return turn # create a random number", "p2_placed, whose_turn) \"\"\" if player is None: player = state[2] if self.round <", "for ind, thing in enumerate(sock.recv(1024).decode().split(\"\\n\")): print(\"when init got {} and {}\".format(ind, thing)) return", "= self.move(self.pack_state(turn)) print(\"My move: \", my_move) msg = \"{}\\n{}\\n\".format(my_move[0], my_move[1]) sock.send(msg.encode()) def other_player(self," ]
[]
[ "serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons',", "models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address',", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations = [ migrations.RenameField(", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers',", "model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name',", "by Django 3.0.9 on 2020-10-25 13:05 from django.db import migrations, models import django.db.models.deletion", "new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title',", "[ ('customers', '0001_initial'), ] operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel(", "django.db import migrations, models import 
django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'),", "# Generated by Django 3.0.9 on 2020-10-25 13:05 from django.db import migrations, models", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='customers.Customer')), ],", "null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True,", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='customers.Customer')), ], ),", "Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations = [ migrations.RenameField( model_name='customer', old_name='name',", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations", "), migrations.CreateModel( name='ContactPerson', fields=[ ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.',", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)),", "operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True,", "name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position',", "related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location',", "migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses',", "old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)),", "Django 3.0.9 on 2020-10-25 13:05 from django.db import migrations, models import django.db.models.deletion class", "= [ ('customers', '0001_initial'), ] operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ),", "migrations.RenameField( model_name='customer', old_name='name', 
new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer',", "on 2020-10-25 13:05 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations = [", "'0001_initial'), ] operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[", "13:05 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "= [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='customers.Customer')),", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ]", "dependencies = [ ('customers', '0001_initial'), ] 
operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company',", "('customers', '0001_initial'), ] operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson',", "('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel(", "max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[", "to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)),", "Generated by Django 3.0.9 on 2020-10-25 13:05 from django.db import migrations, models import", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('location', models.CharField(max_length=100)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='addresses', to='customers.Customer')), ], ), ]", "migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)),", "2020-10-25 13:05 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations = [ migrations.RenameField( 
model_name='customer',", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('customers', '0001_initial'), ] operations =", "[ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ],", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer',", "<gh_stars>0 # Generated by Django 3.0.9 on 2020-10-25 13:05 from django.db import migrations,", "3.0.9 on 2020-10-25 13:05 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')), ], ), migrations.CreateModel( name='Address', fields=[ ('id',", "models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='contact_persons', to='customers.Customer')), ], ),", "verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50, null=True)), ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contact_persons', to='customers.Customer')),", "] operations = [ migrations.RenameField( model_name='customer', old_name='name', new_name='company', ), migrations.CreateModel( name='ContactPerson', fields=[ ('id',", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=50)), ('title', models.CharField(default='Mr./Ms.', max_length=10)), ('position', models.CharField(max_length=50," ]
[ "self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return True return False class UMRMachine(Object.OObject):", "on 15 Aug 2015 Universal Machines @author: NoNotCar ''' import Object import Img", "UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff. This shouldn't be in the", "value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class", "and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return", "fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not", "class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for", "as fish poo). IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world):", "doc=\"Grinds items into 2 powder items. Consumes 500W while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]}", "doc=\"Freezes items (a slow process). Can freeze up to 10 items at once.", "self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other", "elif self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into", "operating. 
IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\")", "Consumes 500W when starting, and 100W to keep cool. IO: Both\" temperature=20 updatable=True", "and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)]", "and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\") iscat=True doc=\"Universal Machines\"", "Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such", "name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special", "Aug 2015 Universal Machines @author: NoNotCar ''' import Object import Img import Buyers", "to 10 items at once. Consumes 500W when starting, and 100W to keep", "progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[]", "y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1", "100W to keep cool. IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x,", "for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)]", "is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff. 
This shouldn't be in the game.", "self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB):", "self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class", "Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]]", "500W while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class", "True return False elif self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\"", "to destroy items (such as fish poo). IO: Input (2 recommended)\" hasio=\"input\" updatable=True", "class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff. This shouldn't be in", "temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[]", "<reponame>Nouranium/Monolith<filename>Monolith/UM.py ''' Created on 15 Aug 2015 Universal Machines @author: NoNotCar ''' import", "self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\") iscat=True doc=\"Universal", "other stuff. This shouldn't be in the game. IO: Both\" recipes={} progress=0 ent=None", "fish poo). 
IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if", "not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name)", "Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). Can freeze up to 10 items", "numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if", "return True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\"", "items at once. Consumes 500W when starting, and 100W to keep cool. IO:", "return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\") iscat=True doc=\"Universal Machines\" def __init__(self): self.menu=[Buyers.ObjBuyer(Incinerator,1000),Buyers.ObjBuyer(Grinder,500),Buyers.ObjBuyer(Fridge,200)]", "self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0:", "into 2 powder items. Consumes 500W while operating. 
IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2", "''' import Object import Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\")", "pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name)", "and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB): name=\"Wood", "NoNotCar ''' import Object import Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True", "self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1", "2 powder items. Consumes 500W while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def", "len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\") iscat=True doc=\"Universal Machines\" def", "destroy items (such as fish poo). IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30}", "2015 Universal Machines @author: NoNotCar ''' import Object import Img import Buyers import", "recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner", "(a slow process). Can freeze up to 10 items at once. 
Consumes 500W", "def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif", "Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy", "_ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0]", "not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\"", "IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x", "while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine):", "pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:])", "self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif", "return False elif self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns", "imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items.", "x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and", "in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] 
self.progress=self.recipes[ent.name][1]", "Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world):", "update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return", "shouldn't be in the game. IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def", "game. IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner):", "__init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and", "self.updatable=True return True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB):", "powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow", "get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). Can freeze", "if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _", "once. Consumes 500W when starting, and 100W to keep cool. 
IO: Both\" temperature=20", "Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y", "IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y", "IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes", "Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such as fish", "owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif", "updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[]", "self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output:", "name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine):", "at once. Consumes 500W when starting, and 100W to keep cool. IO: Both\"", "FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class", "and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0", "return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff. 
This shouldn't", "elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10:", "self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return True return False class", "to keep cool. IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y,", "if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1", "return True return False elif self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True", "hasio=\"both\" doc=\"Turns stuff into other stuff. This shouldn't be in the game. IO:", "''' Created on 15 Aug 2015 Universal Machines @author: NoNotCar ''' import Object", "Machines @author: NoNotCar ''' import Object import Img import Buyers import Entity class", "def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y)", "self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0", "self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not", "starting, and 100W to keep cool. IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def", "fuel to destroy items (such as fish poo). 
IO: Input (2 recommended)\" hasio=\"input\"", "self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True", "Consumes 500W while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10]", "self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in", "WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen", "recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a", "Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such as fish poo). IO:", "img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\"", "Universal Machines @author: NoNotCar ''' import Object import Img import Buyers import Entity", "value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\")", "process). Can freeze up to 10 items at once. 
Consumes 500W when starting,", "self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return", "y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500):", "import Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to", "doc=\"Turns stuff into other stuff. This shouldn't be in the game. IO: Both\"", "and 100W to keep cool. IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self,", "for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent:", "if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if", "if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return True return", "powder items. Consumes 500W while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world):", "doc=\"Burns fuel to destroy items (such as fish poo). IO: Input (2 recommended)\"", "self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None", "name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\"", "cool. 
IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x", "class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB):", "Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds", "self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]:", "pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair)", "Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items", "self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class", "Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen", "up to 10 items at once. Consumes 500W when starting, and 100W to", "self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for", "return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). 
Can freeze up", "False elif self.fuel: return True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff", "the game. IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x, y,", "self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and", "Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder", "This shouldn't be in the game. IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1", "img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items", "slow process). Can freeze up to 10 items at once. Consumes 500W when", "input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\")", "world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if", "import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such as", "is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such as fish poo). IO: Input", "def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return", "class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2", "Can freeze up to 10 items at once. 
Consumes 500W when starting, and", "range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True", "updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if", "import Object import Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns", "and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if", "False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff. This shouldn't be", "def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60", "not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True", "input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel:", "in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items. Consumes 500W while operating. 
IO:", "def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if", "class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600", "self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and", "if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class", "img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" %", "recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if", "not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return True return False", "self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in", "__init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20", "update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for", "value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items into", "Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def 
update(self,world): if self.fuel: self.fuel-=1 def", "name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n in", "pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and", "recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def", "self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not", "15 Aug 2015 Universal Machines @author: NoNotCar ''' import Object import Img import", "if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\") iscat=True", "in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def", "(such as fish poo). IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def", "in the game. IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self, x,", "self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return", "return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10", "n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items. Consumes 500W while operating.", "stuff into other stuff. This shouldn't be in the game. 
IO: Both\" recipes={}", "10 items at once. Consumes 500W when starting, and 100W to keep cool.", "be in the game. IO: Both\" recipes={} progress=0 ent=None powerusage=0 numproducts=1 def __init__(self,", "def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not", "str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items. Consumes 500W", "if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return", "value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n))", "self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not", "self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage): self.progress-=1 elif self.progress==0 and not self.output:", "self.progress-=1 elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False", "for n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items. Consumes 500W while", "poo). IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel:", "class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). 
Can freeze up to 10", "self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False", "x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress and self.owner.get_power(self.powerusage):", "(2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent):", "class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such as fish poo).", "Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\"", "self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\")", "True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff. This", "def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world): if self.progress", "IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1", "update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100):", "img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items (such as fish poo). 
IO: Input (2", "if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True", "% str(n)) for n in range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items. Consumes", "self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif", "and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair", "def input(self,ent): if self.recipes.has_key(ent.name) and len(self.inv)<10: self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object):", "img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). Can freeze up to 10 items at", "self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if world.anitick%8==0: self.temperature-=1 elif self.temperature<20", "False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class", "FrozenSpecialFish(Entity.ResourceB): name=\"Frozen Special Fish\" img=Img.imgret2(\"UM/FroSpFish.png\") value=600 class Grinder(UMRMachine): imgs=[Img.imgret2(\"UM/Grinder%s.png\" % str(n)) for n", "range(5)+range(5)[::-1]] doc=\"Grinds items into 2 powder items. Consumes 500W while operating. IO: Both\"", "numproducts=2 def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process).", "True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\")", "keep cool. 
IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner):", "Object import Img import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel", "Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\")", "items into 2 powder items. Consumes 500W while operating. IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500", "500W when starting, and 100W to keep cool. IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special", "self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and", "self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). Can freeze up to", "self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False", "self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and not self.output: self.output=[pair[0](self.x,self.y)] self.inv.remove(pair) def input(self,ent):", "elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1", "hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name):", "@author: NoNotCar ''' import Object import Img import Buyers import Entity class Incinerator(Object.OObject):", "items (such as fish poo). 
IO: Input (2 recommended)\" hasio=\"input\" updatable=True fuels={\"Woodpile\":60,\"WoodpileSp\":30} fuel=0", "input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False", "if world.anitick%8==0: self.temperature-=1 elif self.temperature<20 and not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]:", "ent=None powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def", "owner): self.x=x self.y=y self.owner=owner self.inv=[] self.output=[] def update(self,world): if self.temperature>-20 and self.owner.get_power(500): if", "return True return False class UMRMachine(Object.OObject): is3d=True hasio=\"both\" doc=\"Turns stuff into other stuff.", "into other stuff. This shouldn't be in the game. IO: Both\" recipes={} progress=0", "Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]} def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner", "powerusage=0 numproducts=1 def __init__(self, x, y, owner): self.x=x self.y=y self.owner=owner self.output=[] def update(self,world):", "self.ent=None self.updatable=False def input(self,ent): if self.recipes.has_key(ent.name) and not self.ent: self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return", "Created on 15 Aug 2015 Universal Machines @author: NoNotCar ''' import Object import", "items. Consumes 500W while operating. 
IO: Both\" recipes={\"Woodpile\":[WoodChips,240],\"WoodpileSp\":[WoodChips,240],\"Wheat\":[Flour,600]} powerusage=500 numproducts=2 def get_img(self,world): return", "class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40 class FrozenSpecialFish(Entity.ResourceB):", "when starting, and 100W to keep cool. IO: Both\" temperature=20 updatable=True recipes={\"Fish\":[FrozenFish,3600],\"Special Fish\":[FrozenSpecialFish,7200]}", "def get_img(self,world): return self.imgs[self.progress//2%10] class Fridge(UMRMachine): img=Img.imgret2(\"UM/Fridge.png\") doc=\"Freezes items (a slow process). Can", "elif self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def", "img=Img.imgret2(\"UM/WoodChip.png\") value=20 class Flour(Entity.ResourceB): name=\"Flour\" img=Img.imgret2(\"UM/Flour.png\") value=10 class FrozenFish(Entity.ResourceB): name=\"Frozen Fish\" img=Img.imgret2(\"UM/FroFish.png\") value=40", "self.ent=self.recipes[ent.name][0] self.progress=self.recipes[ent.name][1] self.updatable=True return True return False class WoodChips(Entity.ResourceB): name=\"Wood Chips\" img=Img.imgret2(\"UM/WoodChip.png\") value=20", "items (a slow process). Can freeze up to 10 items at once. Consumes", "not self.owner.get_power(100): self.temperature+=1 for pair in self.inv[:]: if pair[1]: pair[1]-=1 elif pair[1]==0 and", "stuff. This shouldn't be in the game. 
IO: Both\" recipes={} progress=0 ent=None powerusage=0", "self.inv.append(self.recipes[ent.name][:]) return True return False class UMCategory(object): img=Img.imgret2(\"UM/logo.png\") iscat=True doc=\"Universal Machines\" def __init__(self):", "import Buyers import Entity class Incinerator(Object.OObject): is3d=True img=Img.imgret2(\"UM/Incinerator.png\") doc=\"Burns fuel to destroy items", "freeze up to 10 items at once. Consumes 500W when starting, and 100W", "self.fuels.has_key(ent.name): if not self.fuel: self.fuel=self.fuels[ent.name]*60 return True return False elif self.fuel: return True", "fuel=0 def update(self,world): if self.fuel: self.fuel-=1 def input(self,ent): if self.fuels.has_key(ent.name): if not self.fuel:", "self.progress==0 and not self.output: self.output=[self.ent(self.x,self.y) for _ in range(self.numproducts)] self.ent=None self.updatable=False def input(self,ent):" ]
[ "'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True, default='', max_length=254, verbose_name='Email'), ), ]", "2018-02-27 18:26 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration):", "-*- # Generated by Django 1.11.9 on 2018-02-27 18:26 from __future__ import unicode_literals", "migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [", "utf-8 -*- # Generated by Django 1.11.9 on 2018-02-27 18:26 from __future__ import", "] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody',", "18:26 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies", "Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name':", "models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions(", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations =", "dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category',", "Django 1.11.9 on 2018-02-27 18:26 from __future__ import unicode_literals from django.db import migrations,", "'0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField(", "Generated by Django 1.11.9 on 
2018-02-27 18:26 from __future__ import unicode_literals from django.db", "import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody',", "migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True, default='', max_length=254,", "= [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True,", "('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ),", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations", "unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'),", "name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True, default='', max_length=254, verbose_name='Email'),", "= [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural':", "-*- coding: utf-8 -*- # Generated by Django 1.11.9 on 2018-02-27 18:26 from", "[ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'},", "# Generated by Django 1.11.9 on 2018-02-27 18:26 from __future__ import unicode_literals from", "# -*- coding: 
utf-8 -*- # Generated by Django 1.11.9 on 2018-02-27 18:26", "from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ]", "options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True, default='', max_length=254, verbose_name='Email'), ),", "__future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "by Django 1.11.9 on 2018-02-27 18:26 from __future__ import unicode_literals from django.db import", "1.11.9 on 2018-02-27 18:26 from __future__ import unicode_literals from django.db import migrations, models", "operations = [ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email',", "class Migration(migrations.Migration): dependencies = [ ('publicbody', '0011_auto_20180105_1648'), ] operations = [ migrations.AlterModelOptions( name='category',", "on 2018-02-27 18:26 from __future__ import unicode_literals from django.db import migrations, models class", "<reponame>manonthemat/froide # -*- coding: utf-8 -*- # Generated by Django 1.11.9 on 2018-02-27", "[ migrations.AlterModelOptions( name='category', options={'verbose_name': 'category', 'verbose_name_plural': 'categories'}, ), migrations.AlterField( model_name='publicbody', name='email', field=models.EmailField(blank=True, default='',", "coding: utf-8 -*- # Generated by Django 1.11.9 on 2018-02-27 18:26 from __future__" ]
[ "element in items: total = total + element print(element) print(\"total =\", total) \"\"\"", "<gh_stars>0 companies = [\"google\", \"apple\", \"microsoft\"] for element in companies: print(element) \"\"\" \"\"\"", "\"\"\" items = [0, 12, 30] total = 0 for element in items:", "items: total = total + element print(element) print(\"total =\", total) \"\"\" \"\"\" c", "= [0, 12, 30] total = 0 for element in items: total =", "\"microsoft\"] for element in companies: print(element) \"\"\" \"\"\" items = [0, 12, 30]", "[\"google\", \"apple\", \"microsoft\"] for element in companies: print(element) \"\"\" \"\"\" items = [0,", "\"\"\" \"\"\" items = [0, 12, 30] total = 0 for element in", "= total + element print(element) print(\"total =\", total) \"\"\" \"\"\" c = list(range(1,", "+ element print(element) print(\"total =\", total) \"\"\" \"\"\" c = list(range(1, 5)) print(c)", "companies = [\"google\", \"apple\", \"microsoft\"] for element in companies: print(element) \"\"\" \"\"\" items", "total = total + element print(element) print(\"total =\", total) \"\"\" \"\"\" c =", "= [\"google\", \"apple\", \"microsoft\"] for element in companies: print(element) \"\"\" \"\"\" items =", "[0, 12, 30] total = 0 for element in items: total = total", "\"apple\", \"microsoft\"] for element in companies: print(element) \"\"\" \"\"\" items = [0, 12,", "for element in companies: print(element) \"\"\" \"\"\" items = [0, 12, 30] total", "in companies: print(element) \"\"\" \"\"\" items = [0, 12, 30] total = 0", "12, 30] total = 0 for element in items: total = total +", "items = [0, 12, 30] total = 0 for element in items: total", "in items: total = total + element print(element) print(\"total =\", total) \"\"\" \"\"\"", "0 for element in items: total = total + element print(element) print(\"total =\",", "30] total = 0 for element in items: total = total + element", "print(element) \"\"\" \"\"\" items = [0, 12, 30] total = 0 for element", "for element in items: total = total + element 
print(element) print(\"total =\", total)", "element in companies: print(element) \"\"\" \"\"\" items = [0, 12, 30] total =", "total = 0 for element in items: total = total + element print(element)", "companies: print(element) \"\"\" \"\"\" items = [0, 12, 30] total = 0 for", "total + element print(element) print(\"total =\", total) \"\"\" \"\"\" c = list(range(1, 5))", "= 0 for element in items: total = total + element print(element) print(\"total" ]
[ "This value must be higher than it takes the critical section to execute.", "held. :param expires: We consider any existing lock older than ``expires`` seconds to", ":param key: The key against which the lock will be held. :param expires:", "= DEFAULT_BACKEND['connection'] # Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__))", "% (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\" #", "\"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is None: expires", "1:] try: mod = import_module(module) except ImportError as e: raise ImproperlyConfigured('Error importing pylock", "be higher than it takes the critical section to execute. :param timeout: If", "\"%s\" does not define a \"%s\" class.' % (module, classname)) def parse_url(url, url_scheme):", "lock backend URL.\"\"\" # Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url)", "e)) try: return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does", "path = path.split('?', 2)[0] # Update with environment configuration. connection_info = { 'db':", "if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend class backend_class =", "the lock will be held. 
:param expires: We consider any existing lock older", "older than ``expires`` seconds to be invalid in order to detect crashed clients.", "if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection =", "backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend", "importing pylock backend module %s: \"%s\"' % (module, e)) try: return getattr(mod, classname)", "= 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock')", "'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock') class", "If another client has already obtained the lock, sleep for a maximum of", "existing lock older than ``expires`` seconds to be invalid in order to detect", "= { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock') class Lock(object): \"\"\"", "which the lock will be held. :param expires: We consider any existing lock", "__init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is None: expires = DEFAULT_EXPIRES", "of 0 means no wait (give up right away). \"\"\" def __init__(self, key,", "print \"Critical section\" :param key: The key against which the lock will be", "0 means no wait (give up right away). 
\"\"\" def __init__(self, key, expires=None,", "obtained the lock, sleep for a maximum of ``timeout`` seconds before giving up.", "Usage:: with Lock('my_lock'): print \"Critical section\" :param key: The key against which the", "expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is None: expires = DEFAULT_EXPIRES if timeout", "Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX,", "get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend", "url.path[1:] path = path.split('?', 2)[0] # Update with environment configuration. connection_info = {", "as e: raise ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"' % (module, e))", "import_path[dot + 1:] try: mod = import_module(module) except ImportError as e: raise ImproperlyConfigured('Error", "in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query strings. path = url.path[1:]", "ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't", "invalid in order to detect crashed clients. This value must be higher than", "logging from six.moves.urllib import parse from .backends import LockTimeout # noqa DEFAULT_TIMEOUT =", "ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"' % (module, e)) try: return getattr(mod,", ":param expires: We consider any existing lock older than ``expires`` seconds to be", "# Update with environment configuration. connection_info = { 'db': path, 'user': url.username, 'password':", "= DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None:", "no wait (give up right away). 
\"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None,", "the lock, sleep for a maximum of ``timeout`` seconds before giving up. A", "timeout: If another client has already obtained the lock, sleep for a maximum", "DEFAULT_BACKEND['connection'] # Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key", "get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme)", "module %s: \"%s\"' % (module, e)) try: return getattr(mod, classname) except AttributeError: raise", "path = url.path[1:] path = path.split('?', 2)[0] # Update with environment configuration. connection_info", "client has already obtained the lock, sleep for a maximum of ``timeout`` seconds", "of ``timeout`` seconds before giving up. A value of 0 means no wait", "= import_module(module) except ImportError as e: raise ImproperlyConfigured('Error importing pylock backend module %s:", "another client has already obtained the lock, sleep for a maximum of ``timeout``", "def parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\" # Register extra schemes", "giving up. A value of 0 means no wait (give up right away).", "define a \"%s\" class.' % (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed", "= path.split('?', 2)[0] # Update with environment configuration. connection_info = { 'db': path,", "be held. 
:param expires: We consider any existing lock older than ``expires`` seconds", "10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger", "import_module import logging from six.moves.urllib import parse from .backends import LockTimeout # noqa", "LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND", "logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage:: with Lock('my_lock'): print \"Critical section\" :param", "= 60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock',", "crashed clients. This value must be higher than it takes the critical section", "strings. path = url.path[1:] path = path.split('?', 2)[0] # Update with environment configuration.", "extra schemes in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query strings. path", "getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does not define a", "lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info)", "timeout=None, backend_class_path=None, backend_connection=None): if expires is None: expires = DEFAULT_EXPIRES if timeout is", "parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query strings. 
path = url.path[1:] path =", "raise ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"' % (module, e)) try: return", "= \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key,", "expires is None: expires = DEFAULT_EXPIRES if timeout is None: timeout = DEFAULT_TIMEOUT", "be invalid in order to detect crashed clients. This value must be higher", "# Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] # Update", "<gh_stars>1-10 from importlib import import_module import logging from six.moves.urllib import parse from .backends", "the critical section to execute. :param timeout: If another client has already obtained", "'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage::", "is None: expires = DEFAULT_EXPIRES if timeout is None: timeout = DEFAULT_TIMEOUT if", "backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock", "backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend class backend_class = get_backend_class(backend_class_path)", "= parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client) def", "try: dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend module.\"", "= backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self,", "value of 0 means no wait (give up right away). 
\"\"\" def __init__(self,", "= import_path[:dot], import_path[dot + 1:] try: mod = import_module(module) except ImportError as e:", "'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking.", "timeout is None: timeout = DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class']", "away). \"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is None:", "import_path[:dot], import_path[dot + 1:] try: mod = import_module(module) except ImportError as e: raise", "takes the critical section to execute. :param timeout: If another client has already", "try: mod = import_module(module) except ImportError as e: raise ImproperlyConfigured('Error importing pylock backend", "backend URL.\"\"\" # Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) #", "Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] # Update with", "2)[0] # Update with environment configuration. connection_info = { 'db': path, 'user': url.username,", "(module, e)) try: return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\"", "url = parse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?',", "to execute. :param timeout: If another client has already obtained the lock, sleep", "= parse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?', 2)[0]", "self._lock = backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val,", "60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection':", "does not define a \"%s\" class.' 
% (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses", "already obtained the lock, sleep for a maximum of ``timeout`` seconds before giving", "= url.path[1:] path = path.split('?', 2)[0] # Update with environment configuration. connection_info =", "%s: \"%s\"' % (module, e)) try: return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock", "\"\"\"Parses a distributed lock backend URL.\"\"\" # Register extra schemes in URLs. parse.uses_netloc.append(url_scheme)", "a distributed lock backend URL.\"\"\" # Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url", "url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire()", "+ 1:] try: mod = import_module(module) except ImportError as e: raise ImproperlyConfigured('Error importing", "connection_info = { 'db': path, 'user': url.username, 'password': <PASSWORD>, 'host': url.hostname, 'port': url.port", "= get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection,", "sleep for a maximum of ``timeout`` seconds before giving up. A value of", "def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot", "return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does not define", "AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does not define a \"%s\" class.' %", "parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client) def __enter__(self):", "schemes in URLs. 
parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query strings. path =", "(give up right away). \"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if", "a pylock backend module.\" % import_path) module, classname = import_path[:dot], import_path[dot + 1:]", "import_path) module, classname = import_path[:dot], import_path[dot + 1:] try: mod = import_module(module) except", "\"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires,", "import import_module import logging from six.moves.urllib import parse from .backends import LockTimeout #", "\"\"\" Distributed locking. Usage:: with Lock('my_lock'): print \"Critical section\" :param key: The key", "right away). \"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is", "KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger =", "DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock') class Lock(object):", "{ 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' } logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed", "% import_path) module, classname = import_path[:dot], import_path[dot + 1:] try: mod = import_module(module)", "Lock('my_lock'): print \"Critical section\" :param key: The key against which the lock will", "URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query strings. 
path = url.path[1:] path", "None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] # Load", "__enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path):", "mod = import_module(module) except ImportError as e: raise ImproperlyConfigured('Error importing pylock backend module", "client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass", "expires = DEFAULT_EXPIRES if timeout is None: timeout = DEFAULT_TIMEOUT if backend_class_path is", "if timeout is None: timeout = DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path =", "def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def", "critical section to execute. :param timeout: If another client has already obtained the", "classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\" # Register extra", "None: expires = DEFAULT_EXPIRES if timeout is None: timeout = DEFAULT_TIMEOUT if backend_class_path", "class Lock(object): \"\"\" Distributed locking. 
Usage:: with Lock('my_lock'): print \"Critical section\" :param key:", "key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is None: expires = DEFAULT_EXPIRES if", "ImportError as e: raise ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"' % (module,", "\"%s\"' % (module, e)) try: return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend", "= DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend class", "backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key)", "logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client", "importlib import import_module import logging from six.moves.urllib import parse from .backends import LockTimeout", "with Lock('my_lock'): print \"Critical section\" :param key: The key against which the lock", "expires, timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class", "ImproperlyConfigured(\"%s isn't a pylock backend module.\" % import_path) module, classname = import_path[:dot], import_path[dot", "query strings. path = url.path[1:] path = path.split('?', 2)[0] # Update with environment", "section\" :param key: The key against which the lock will be held. :param", "against which the lock will be held. 
:param expires: We consider any existing", "None: timeout = DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection", "exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot = import_path.rindex('.') except", "= { 'db': path, 'user': url.username, 'password': <PASSWORD>, 'host': url.hostname, 'port': url.port }", "try: return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does not", "# Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key =", "key against which the lock will be held. :param expires: We consider any", "e: raise ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"' % (module, e)) try:", "Distributed locking. Usage:: with Lock('my_lock'): print \"Critical section\" :param key: The key against", "backend_connection=None): if expires is None: expires = DEFAULT_EXPIRES if timeout is None: timeout", "self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError: raise", "} logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage:: with Lock('my_lock'): print", "except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend module.\" % import_path) module, classname", "lock, sleep for a maximum of ``timeout`` seconds before giving up. A value", "up right away). \"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires", "section to execute. 
:param timeout: If another client has already obtained the lock,", "exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot = import_path.rindex('.')", "order to detect crashed clients. This value must be higher than it takes", "'redis://' } logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage:: with Lock('my_lock'):", "= logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage:: with Lock('my_lock'): print \"Critical section\"", "backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release()", "clients. This value must be higher than it takes the critical section to", "must be higher than it takes the critical section to execute. :param timeout:", "import_module(module) except ImportError as e: raise ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"'", "pylock backend module.\" % import_path) module, classname = import_path[:dot], import_path[dot + 1:] try:", "distributed lock backend URL.\"\"\" # Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url =", "wait (give up right away). \"\"\" def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None):", "it takes the critical section to execute. :param timeout: If another client has", "up. A value of 0 means no wait (give up right away). 
\"\"\"", "= DEFAULT_EXPIRES if timeout is None: timeout = DEFAULT_TIMEOUT if backend_class_path is None:", "= import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend module.\" % import_path)", "parse from .backends import LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10", "import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend module.\" % import_path) module,", "is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] #", "isn't a pylock backend module.\" % import_path) module, classname = import_path[:dot], import_path[dot +", "DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection", "connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client)", "{ 'db': path, 'user': url.username, 'password': <PASSWORD>, 'host': url.hostname, 'port': url.port } return", "six.moves.urllib import parse from .backends import LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES", "'connection': 'redis://' } logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage:: with", "from importlib import import_module import logging from six.moves.urllib import parse from .backends import", "backend module \"%s\" does not define a \"%s\" class.' 
% (module, classname)) def", "from .backends import LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX", "'db': path, 'user': url.username, 'password': <PASSWORD>, 'host': url.hostname, 'port': url.port } return connection_info", "backend_connection = DEFAULT_BACKEND['connection'] # Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock", "logger = logging.getLogger('pylock') class Lock(object): \"\"\" Distributed locking. Usage:: with Lock('my_lock'): print \"Critical", "The key against which the lock will be held. :param expires: We consider", "= backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb):", "__exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot =", "DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class':", "seconds before giving up. A value of 0 means no wait (give up", "classname = import_path[:dot], import_path[dot + 1:] try: mod = import_module(module) except ImportError as", "not define a \"%s\" class.' % (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a", "path.split('?', 2)[0] # Update with environment configuration. connection_info = { 'db': path, 'user':", "execute. :param timeout: If another client has already obtained the lock, sleep for", "locking. Usage:: with Lock('my_lock'): print \"Critical section\" :param key: The key against which", "import parse from .backends import LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES =", "higher than it takes the critical section to execute. :param timeout: If another", "module \"%s\" does not define a \"%s\" class.' 
% (module, classname)) def parse_url(url,", "noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = {", "backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info =", "dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend module.\" %", "backend module.\" % import_path) module, classname = import_path[:dot], import_path[dot + 1:] try: mod", "key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout,", "is None: timeout = DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if", "a maximum of ``timeout`` seconds before giving up. A value of 0 means", "consider any existing lock older than ``expires`` seconds to be invalid in order", "raise ImproperlyConfigured('Pylock backend module \"%s\" does not define a \"%s\" class.' 
% (module,", "except ImportError as e: raise ImproperlyConfigured('Error importing pylock backend module %s: \"%s\"' %", "exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError:", "is None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using", "has already obtained the lock, sleep for a maximum of ``timeout`` seconds before", "self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try:", "classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does not define a \"%s\"", "% (module, e)) try: return getattr(mod, classname) except AttributeError: raise ImproperlyConfigured('Pylock backend module", "raise ImproperlyConfigured(\"%s isn't a pylock backend module.\" % import_path) module, classname = import_path[:dot],", "backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection']", "with environment configuration. connection_info = { 'db': path, 'user': url.username, 'password': <PASSWORD>, 'host':", "will be held. :param expires: We consider any existing lock older than ``expires``", "maximum of ``timeout`` seconds before giving up. A value of 0 means no", "= 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://' }", "means no wait (give up right away). 
\"\"\" def __init__(self, key, expires=None, timeout=None,", "import LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:'", "url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\" # Register extra schemes in URLs.", "# noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND =", "import logging from six.moves.urllib import parse from .backends import LockTimeout # noqa DEFAULT_TIMEOUT", "value must be higher than it takes the critical section to execute. :param", "parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\" # Register extra schemes in", "URL.\"\"\" # Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove", "key: The key against which the lock will be held. :param expires: We", "class.' % (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\"", "DEFAULT_BACKEND['class'] if backend_connection is None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend class backend_class", "module.\" % import_path) module, classname = import_path[:dot], import_path[dot + 1:] try: mod =", "Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query strings.", "# Register extra schemes in URLs. parse.uses_netloc.append(url_scheme) url = parse.urlparse(url) # Remove query", "None: backend_connection = DEFAULT_BACKEND['connection'] # Load backend class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0}", "lock older than ``expires`` seconds to be invalid in order to detect crashed", "pylock backend module %s: \"%s\"' % (module, e)) try: return getattr(mod, classname) except", "``expires`` seconds to be invalid in order to detect crashed clients. This value", "Lock(object): \"\"\" Distributed locking. 
Usage:: with Lock('my_lock'): print \"Critical section\" :param key: The", "to be invalid in order to detect crashed clients. This value must be", "def get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a pylock", "for a maximum of ``timeout`` seconds before giving up. A value of 0", ".backends import LockTimeout # noqa DEFAULT_TIMEOUT = 60 DEFAULT_EXPIRES = 10 KEY_PREFIX =", "backend module %s: \"%s\"' % (module, e)) try: return getattr(mod, classname) except AttributeError:", "Update with environment configuration. connection_info = { 'db': path, 'user': url.username, 'password': <PASSWORD>,", "{0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client =", "backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type,", "DEFAULT_EXPIRES if timeout is None: timeout = DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path", "expires: We consider any existing lock older than ``expires`` seconds to be invalid", "``timeout`` seconds before giving up. A value of 0 means no wait (give", "We consider any existing lock older than ``expires`` seconds to be invalid in", "in order to detect crashed clients. 
This value must be higher than it", "def __init__(self, key, expires=None, timeout=None, backend_class_path=None, backend_connection=None): if expires is None: expires =", "key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info = parse_url(backend_connection, url_scheme=backend_class.url_scheme) client = backend_class.get_client(**connection_info) self._lock =", "timeout = DEFAULT_TIMEOUT if backend_class_path is None: backend_class_path = DEFAULT_BACKEND['class'] if backend_connection is", "from six.moves.urllib import parse from .backends import LockTimeout # noqa DEFAULT_TIMEOUT = 60", "ImproperlyConfigured('Pylock backend module \"%s\" does not define a \"%s\" class.' % (module, classname))", "before giving up. A value of 0 means no wait (give up right", "class backend_class = get_backend_class(backend_class_path) logger.info(\"Using {0} lock backend\".format(backend_class.__name__)) key = \"{0}{1}\".format(KEY_PREFIX, key) connection_info", "detect crashed clients. This value must be higher than it takes the critical", "than it takes the critical section to execute. :param timeout: If another client", "class ImproperlyConfigured(Exception): pass def get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s", "module, classname = import_path[:dot], import_path[dot + 1:] try: mod = import_module(module) except ImportError", "ValueError: raise ImproperlyConfigured(\"%s isn't a pylock backend module.\" % import_path) module, classname =", "A value of 0 means no wait (give up right away). \"\"\" def", "\"%s\" class.' % (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend", "client = backend_class.get_client(**connection_info) self._lock = backend_class(key, expires, timeout, client) def __enter__(self): self._lock.acquire() def", "environment configuration. 
connection_info = { 'db': path, 'user': url.username, 'password': <PASSWORD>, 'host': url.hostname,", "if expires is None: expires = DEFAULT_EXPIRES if timeout is None: timeout =", "except AttributeError: raise ImproperlyConfigured('Pylock backend module \"%s\" does not define a \"%s\" class.'", "\"Critical section\" :param key: The key against which the lock will be held.", "to detect crashed clients. This value must be higher than it takes the", ":param timeout: If another client has already obtained the lock, sleep for a", "configuration. connection_info = { 'db': path, 'user': url.username, 'password': <PASSWORD>, 'host': url.hostname, 'port':", "lock will be held. :param expires: We consider any existing lock older than", "than ``expires`` seconds to be invalid in order to detect crashed clients. This", "a \"%s\" class.' % (module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed lock", "timeout, client) def __enter__(self): self._lock.acquire() def __exit__(self, exc_type, exc_val, exc_tb): self._lock.release() class ImproperlyConfigured(Exception):", "any existing lock older than ``expires`` seconds to be invalid in order to", "(module, classname)) def parse_url(url, url_scheme): \"\"\"Parses a distributed lock backend URL.\"\"\" # Register", "parse.urlparse(url) # Remove query strings. path = url.path[1:] path = path.split('?', 2)[0] #", "pass def get_backend_class(import_path): try: dot = import_path.rindex('.') except ValueError: raise ImproperlyConfigured(\"%s isn't a", "seconds to be invalid in order to detect crashed clients. This value must", "DEFAULT_EXPIRES = 10 KEY_PREFIX = 'pylock:' DEFAULT_BACKEND = { 'class': 'pylock.backends.redis_lock.RedisLock', 'connection': 'redis://'", "backend_class_path=None, backend_connection=None): if expires is None: expires = DEFAULT_EXPIRES if timeout is None:" ]
[ "+ str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) +", "dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train =", "str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch", "dataset name as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers,", "test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]},", "trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch)", "epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch)", "ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import Net from utils.config", "eval(model, device, loader, evaluator): model.eval() y_true = [] y_pred = [] for step,", "= [] y_pred = [] for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")):", "DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) 
model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel()", "<reponame>icmlsubmission-spec/spec-gnn<filename>ogbg/mol/main.py import os import random import torch import torch.nn.functional as F from torch.utils.data", "ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter", "'_' \\ + str(config.hyperparams.decay_rate) + '_' \\ + 'B' + str(config.hyperparams.batch_size) \\ +", "= checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model,", "\"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = []", "(bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr", "+ str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers)", "+ str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) +", "dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve =", "{ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch in", "best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve))) if __name__", "lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', 
{ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch)", "pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred", "loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP',", "{ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) + '_'", "eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:',", "### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx =", "as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict':", "import SummaryWriter import torch.optim as optim import numpy as np from tqdm import", "str(config.epsilon) \\ + 'P' + str(config.power) \\ + 'I' + str(config.get('identity', 1)) +", "= DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl)", "bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing training loss. 
is_labeled =", "'_' \\ + str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) + '_' \\ +", "'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close()", "if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch =", "scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification' in", "{}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(),", "+ str(config.hyperparams.decay_rate) + '_' \\ + 'B' + str(config.hyperparams.batch_size) \\ + 'S' +", "torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return", "model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr =", "\\ + str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) + '_' \\ + 'B'", "{}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. 
takes", "np from tqdm import tqdm ### importing OGB from ogb.graphproppred import Evaluator, collate_dgl", "test_perf = eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf})", "optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\")", "'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch)", "if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device =", "None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else", "cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch)", "best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve))) if __name__ == \"__main__\": main()", "valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr},", "train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf =", 
"labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item()", "loss.item() optimizer.step() return loss_all / len(loader) def eval(model, device, loader, evaluator): model.eval() y_true", "= checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch))", "{}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch =", "optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion", "os import random import torch import torch.nn.functional as F from torch.utils.data import DataLoader", "OGB from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation", "'best.tar' else: filename = filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ')", "model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing training", "train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:',", "str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) + '_'", 
"str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) + '_'", "= bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: with torch.no_grad():", "# print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric],", "device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric],", "lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve))", "if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else:", "train_curve = [] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) +", "in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve", "labels.to(device) if x.shape[0] == 1: pass else: with torch.no_grad(): pred = model(bg, x,", "\\ + 'E' + str(config.epsilon) \\ + 'P' + str(config.power) \\ + 'I'", "+ '_' \\ + str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) + '_' \\", "utils.lr import warm_up_lr def train(model, device, loader, optimizer, criterion): model.train() loss_all = 0", "if torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases", "'_' \\ + str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) + '_' \\ +", "torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve = [] trainL_curve = []", 
"valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr},", "== 1: pass else: with torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu())", "in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat')", "cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch in range(cur_epoch +", "edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing training loss. is_labeled", "torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time def", "as F from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as", "x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if", "= bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels =", "model.eval() y_true = [] y_pred = [] for step, (bg, labels) in enumerate(tqdm(loader,", "str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) +", "if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading", "+ 'S' + str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0", "eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) # print({'Train': train_perf,", "str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) + '_'", "end=' ') 
checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch']", "= dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. takes dataset name", "checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator) valid_perf", "trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch)", "{}, Validation: {}, epoch: {}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch],", "import torch import torch.nn.functional as F from torch.utils.data import DataLoader from torch.utils.tensorboard import", "= {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time def main(): args =", "from tqdm import tqdm ### importing OGB from ogb.graphproppred import Evaluator, collate_dgl import", "0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch <=", "= eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf =", "'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss)", "best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: 
best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve)", "str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train') is", "None: filename_header = str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) + '_' \\ +", "train_loss = train(model, device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() #", "torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name)", "+ '_' \\ + str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) + '_' \\", "= int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None:", "warm_up_lr def train(model, device, loader, optimizer, criterion): model.train() loss_all = 0 for step,", "y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time def main(): args = get_args() config", "bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: with torch.no_grad(): pred", "None: for seed in config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config)", "evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric])", "in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat')", "np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic", "') checkpoint = torch.load(config.resume_train) 
model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss", "== labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step()", "device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:',", "{}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve))) if", "criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\")", "= DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset)", "optimizer, criterion): model.train() loss_all = 0 for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train", "= torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve = [] trainL_curve =", "is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if", "{} training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs:", "= bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: pred =", "if config.get('resume_train') is not None: print(\"Loading model from 
{}...\".format(config.resume_train), end=' ') checkpoint =", "for epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs:", "utils.config import process_config, get_args from utils.lr import warm_up_lr def train(model, device, loader, optimizer,", "torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as optim import numpy", "filename = filename_header + 'best.tar' else: filename = filename_header + 'curr.tar' print(\"Saving model", "num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in", "edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1:", "train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric])", "end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss':", "get_args() config = process_config(args) print(config) if config.get('seeds') is not None: for seed in", "+ '_' \\ + str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) + '_' \\", "train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers,", "batch_size=config.hyperparams.batch_size, 
shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for", "\\ + str(config.hyperparams.decay_rate) + '_' \\ + 'B' + str(config.hyperparams.batch_size) \\ + 'S'", "test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train", "num_params = sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate,", "= scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion) if", "import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as optim import numpy as", "= [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7])", "+ '_' \\ + str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) + '_' \\", "valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP',", "epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch,", "'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir,", "evaluator) valid_perf = eval(model, device, valid_loader, evaluator) 
test_perf = eval(model, device, test_loader, evaluator)", "best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename = filename_header +", "= np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished", "DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ###", "process_config(args) print(config) if config.get('seeds') is not None: for seed in config.seeds: config.seed =", "train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch", "print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve)", "'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type:", "automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split()", "str(config.power) \\ + 'I' + str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling) +", "+ str(config.power) \\ + 'I' + str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling)", "run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed)", "torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 
'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr':", "= [] train_curve = [] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp =", "x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing training loss.", "device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model, device,", "num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(),", "'_' \\ + str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\ + 'E' +", "evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:',", "= DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params =", "\\ + str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\ + 'E' + str(config.epsilon)", "= model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing", "test_curve = [] train_curve = [] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp", "config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) +", "= checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator)", "= bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass", "dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() 
input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict)", "enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases", "DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader", "scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion) if epoch", "str(config.norm) \\ + 'E' + str(config.epsilon) \\ + 'P' + str(config.power) \\ +", "loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step() return loss_all", "as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader", "cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {}", "+ str(config.epsilon) \\ + 'P' + str(config.power) \\ + 'I' + str(config.get('identity', 1))", "test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric])", "shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p", "str(config.get('num_workers', 'na')) cur_epoch = 0 if 
config.get('resume_train') is not None: print(\"Loading model from", "train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader,", "cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch)", "= process_config(args) print(config) if config.get('seeds') is not None: for seed in config.seeds: config.seed", "y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import", "else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if", "filename)) print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train =", "random import torch import torch.nn.functional as F from torch.utils.data import DataLoader from torch.utils.tensorboard", "batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model =", "def eval(model, device, loader, evaluator): model.eval() y_true = [] y_pred = [] for", "{ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp:", "DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import Net from utils.config import process_config,", "len(loader) 
def eval(model, device, loader, evaluator): model.eval() y_true = [] y_pred = []", "evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator)", "filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(),", "evaluator.eval(input_dict) import time def main(): args = get_args() config = process_config(args) print(config) if", "1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr", "in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones,", "= 0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch", "0 for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x", "### importing OGB from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation", "importing OGB from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import", "+ str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na'))", "dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. 
takes dataset name as", "criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model", "loader, optimizer, criterion): model.train() loss_all = 0 for step, (bg, labels) in enumerate(tqdm(loader,", "loss_all = loss_all + loss.item() optimizer.step() return loss_all / len(loader) def eval(model, device,", "'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]},", "in range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs,", "collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False,", "best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {},", "1)) + '_' \\ + str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) + '_'", "1: pass else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan", "config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr =", "import tqdm ### importing OGB from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..')", 
"= max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation:", "and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx", "# train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. takes dataset name as input", "loss. is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all =", "test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric],", "model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(),", "writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val", "loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model,", "desc=\"Train iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases =", "{ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp:", "F from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as optim", "\\ + 'S' + str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch =", 
"cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp:", "is not None: filename_header = str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) + '_'", "writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) + '_'", "+ str(config.time_stamp) + '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val =", "import process_config, get_args from utils.lr import warm_up_lr def train(model, device, loader, optimizer, criterion):", "+ '_' \\ + str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\ + 'E'", "dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time def main():", "= loss_all + loss.item() optimizer.step() return loss_all / len(loader) def eval(model, device, loader,", "model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename))", "import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import Net from utils.config import", "= Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}')", "scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader,", "### automatic evaluator. 
takes dataset name as input evaluator = Evaluator(config.dataset_name) train_loader =", "+ 'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict':", "writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) +", "best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation: {}, epoch: {},", "training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step()", "Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set", "valid_curve = [] test_curve = [] train_curve = [] trainL_curve = [] writer", "def main(): args = get_args() config = process_config(args) print(config) if config.get('seeds') is not", "None: print(\"Loading model from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict'])", "evaluator) test_perf = eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test':", "y_pred} return evaluator.eval(input_dict) import time def main(): args = get_args() config = process_config(args)", "in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train", "ts_fk_algo_hp = str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) + '_' \\ + str(config.norm)", "{ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) 
writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir')", "bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict =", "training loss. is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all", "not None: print(\"Loading model from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device)", "str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) + '_' \\ + 'B' + str(config.hyperparams.batch_size)", "main(): args = get_args() config = process_config(args) print(config) if config.get('seeds') is not None:", "writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr',", "config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss = train(model, device,", "import random import torch import torch.nn.functional as F from torch.utils.data import DataLoader from", "config.get('resume_train') is not None: print(\"Loading model from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train)", "test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr)", "valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', 
test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:',", "test: {}, Validation: {}, epoch: {}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch],", "filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. takes dataset name as input evaluator = Evaluator(config.dataset_name)", "Validation: {}, epoch: {}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch,", "= valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename = filename_header + 'curr.tar'", "= filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. takes dataset name as input evaluator =", "scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model,", "valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:',", "= torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting dataset =", "+ 'B' + str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\ + 'W' +", "'_' \\ + str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) + '_' \\ +", "= train(model, device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step()", "+ str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header +", "epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not", "input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time def main(): args", "desc=\"Eval iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr 
= bg.edata.pop('feat') bases =", "if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve =", "[] train_curve = [] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp)", "\\ + str(config.norm) \\ + 'E' + str(config.epsilon) \\ + 'P' + str(config.power)", "device, loader, optimizer, criterion): model.train() loss_all = 0 for step, (bg, labels) in", "<= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss", "epoch) if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) + '_' \\ +", "/ len(loader) def eval(model, device, loader, evaluator): model.eval() y_true = [] y_pred =", "sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import", "milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss()", "= [] for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device)", "if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader,", "writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch", "not None: for seed in config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config)", "SummaryWriter import torch.optim as optim import numpy as np from tqdm import tqdm", "'classification' in dataset.task_type: best_val_epoch = 
np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve))", "+ '_' \\ + str(config.time_stamp) + '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] >", "+ 'best.tar' else: filename = filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename), end='", "run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device", "epoch: {}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve)))", "= torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion", "+ str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) +", "random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')", "torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading and", "+ '_' \\ + str(config.hyperparams.decay_rate) + '_' \\ + 'B' + str(config.hyperparams.batch_size) \\", "iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases')", "config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda'", "[] for step, (bg, labels) in enumerate(tqdm(loader, 
desc=\"Eval iteration\")): bg = bg.to(device) x", "batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader =", "optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if", "1: pass else: with torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu())", "best_train = min(train_curve) print('Finished test: {}, Validation: {}, epoch: {}, best train: {},", "as optim import numpy as np from tqdm import tqdm ### importing OGB", "+ str(config.norm) \\ + 'E' + str(config.epsilon) \\ + 'P' + str(config.power) \\", "Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False,", "writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if", "splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx =", "max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation: {},", "# scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device,", "= 0 if 
config.get('resume_train') is not None: print(\"Loading model from {}...\".format(config.resume_train), end=' ')", "torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve =", "labels = labels.to(device) if x.shape[0] == 1: pass else: with torch.no_grad(): pred =", "p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer,", "writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch in range(cur_epoch + 1,", "test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]},", "optimizer.step() return loss_all / len(loader) def eval(model, device, loader, evaluator): model.eval() y_true =", "collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters())", "bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels", "{ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp:", "+ 1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer)", "print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, 
evaluator) valid_perf = eval(model, device,", "{num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\"", "Net from utils.config import process_config, get_args from utils.lr import warm_up_lr def train(model, device,", "model from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict'])", "not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available()", "from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import Net from", "DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model", "+ 'I' + str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling) + '_' \\", "labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr =", "filename_header + 'best.tar' else: filename = filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename),", "filter_train_set from model import Net from utils.config import process_config, get_args from utils.lr 
import", "warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss = train(model,", "print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict':", "'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp:", "is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all", "cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch in range(cur_epoch", "with torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true,", "+ loss.item() optimizer.step() return loss_all / len(loader) def eval(model, device, loader, evaluator): model.eval()", "lr}, cur_epoch) best_val = 0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs +", "dataset) ### automatic evaluator. 
takes dataset name as input evaluator = Evaluator(config.dataset_name) train_loader", "config.get('seeds') is not None: for seed in config.seeds: config.seed = seed config.time_stamp =", "+ '_' \\ + 'B' + str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\", "(bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr", "valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp:", "= torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time", "evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]],", "targets (unlabeled) when computing training loss. 
is_labeled = labels == labels loss =", "+ 'P' + str(config.power) \\ + 'I' + str(config.get('identity', 1)) + '_' \\", "DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as optim import numpy as np", "= eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric],", "'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss,", "print('Finished test: {}, Validation: {}, epoch: {}, best train: {}, best loss: {}'", "min(train_curve) print('Finished test: {}, Validation: {}, epoch: {}, best train: {}, best loss:", "labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step() return", "if config.get('seeds') is not None: for seed in config.seeds: config.seed = seed config.time_stamp", "optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model,", "collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from", "= filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict':", "torch import torch.nn.functional as F from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter", "(unlabeled) when computing training loss. 
is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled],", "device, loader, evaluator): model.eval() y_true = [] y_pred = [] for step, (bg,", "collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device)", "criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve = []", "computing training loss. is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward()", "config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf =", "\\ + str(config.time_stamp) + '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val", "str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) + '_'", "str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar'", "'_' \\ + str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) + '_' \\ +", "is not None: print(\"Loading model from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict'])", "model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr']", "torch.utils.tensorboard import SummaryWriter import torch.optim as optim 
import numpy as np from tqdm", "\\ + str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden)", "train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header =", "'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp:", "lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader,", "= get_args() config = process_config(args) print(config) if config.get('seeds') is not None: for seed", "eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model,", "+ str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) +", "cur_epoch = 0 if config.get('resume_train') is not None: print(\"Loading model from {}...\".format(config.resume_train), end='", "= 0 for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device)", "[] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\", "bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: pred = model(bg,", "+ 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train') is not None:", "0 if config.get('resume_train') is not None: print(\"Loading model from {}...\".format(config.resume_train), end=' ') checkpoint", "ogbg.mol.utils.filter import filter_train_set from model import Net from utils.config import process_config, get_args from", "is not None: for seed in config.seeds: config.seed = seed config.time_stamp 
= int(time.time())", "print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator)", "dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"],", "test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params", "valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:',", "torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ###", "best_val = 0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1): if", "lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch)", "= np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation: {}, epoch: {}, best", "{}, epoch: {}, best train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train,", "= [] trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_'", "print(\"Loading model from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) 
model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict'])", "writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch", "str(config.time_stamp) + '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric]", "if x.shape[0] == 1: pass else: with torch.no_grad(): pred = model(bg, x, edge_attr,", "else: filename = filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch':", "labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step() return loss_all / len(loader) def", "evaluator. takes dataset name as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size,", "return loss_all / len(loader) def eval(model, device, loader, evaluator): model.eval() y_true = []", "\\ + str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout)", "'_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename =", "num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer =", "print(\"Epoch {} training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion) if epoch >", "+ '_' \\ + str(config.norm) \\ + 'E' + str(config.epsilon) \\ + 'P'", "int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed)", "= 
str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) + '_' \\ + str(config.dataset_name) if", "else: with torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true =", "shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture,", "[] y_pred = [] for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg", "+ str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train')", "evaluator): model.eval() y_true = [] y_pred = [] for step, (bg, labels) in", "'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch)", "'P' + str(config.power) \\ + 'I' + str(config.get('identity', 1)) + '_' \\ +", "'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]},", "device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting dataset", "[] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) +", "print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 
'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric])", "config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if", "> best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename =", "tqdm ### importing OGB from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from", "loss_all / len(loader) def eval(model, device, loader, evaluator): model.eval() y_true = [] y_pred", "seed in config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config)", "config = process_config(args) print(config) if config.get('seeds') is not None: for seed in config.seeds:", "valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP',", "== 1: pass else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore", "step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat')", "'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch =", "y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true,", "process_config, get_args from utils.lr import warm_up_lr def train(model, device, loader, optimizer, criterion): model.train()", "numpy as np from tqdm import tqdm ### importing OGB from ogb.graphproppred import", "+ '_' \\ + 
str(config.architecture.layers) + '_' \\ + str(config.architecture.hidden) + '_' \\", "\\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header", "print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric])", "print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed)", "str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\ + 'E' + str(config.epsilon) \\ +", "= DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl)", "config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss =", "criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device,", "str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) + '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric]", "{ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs", "test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) 
test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]},", "from torch.utils.tensorboard import SummaryWriter import torch.optim as optim import numpy as np from", "lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) + '_' \\", "loss_all + loss.item() optimizer.step() return loss_all / len(loader) def eval(model, device, loader, evaluator):", "y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\":", "\\ + str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones)", "## ignore nan targets (unlabeled) when computing training loss. is_labeled = labels ==", "import filter_train_set from model import Net from utils.config import process_config, get_args from utils.lr", "bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else:", "checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device,", "device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:',", "\\ + 'P' + str(config.power) \\ + 'I' + str(config.get('identity', 1)) + '_'", "+ '_' \\ + str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) + '_' \\", "best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename = filename_header", "') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss,", "+ 
str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\ + 'E' + str(config.epsilon) \\", "def train(model, device, loader, optimizer, criterion): model.train() loss_all = 0 for step, (bg,", "total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator.", "test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]},", "'_' \\ + 'B' + str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\ +", "'I' + str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling) + '_' \\ +", "epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr},", "torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion =", "train: {}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve))) if __name__ ==", "when computing training loss. 
is_labeled = labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled])", "= min(train_curve) print('Finished test: {}, Validation: {}, epoch: {}, best train: {}, best", "for seed in config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else:", "cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0 for", "[] test_curve = [] train_curve = [] trainL_curve = [] writer = SummaryWriter(config.directory)", "print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator) valid_perf =", "'S' + str(config.seed) \\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if", "seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is", "bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device)", "torch.optim as optim import numpy as np from tqdm import tqdm ### importing", "= bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0]", "get_args from utils.lr import warm_up_lr def train(model, device, loader, optimizer, criterion): model.train() loss_all", "+ '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename", "else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve = []", "not None: filename_header = str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) + '_' \\", "labels = labels.to(device) if x.shape[0] == 1: pass else: pred = model(bg, x,", "optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} 
training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer,", "valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp:", "= criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step() return loss_all /", "lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion =", "from {}...\".format(config.resume_train), end=' ') checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch", "np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation: {}, epoch: {}, best train:", "range(cur_epoch + 1, config.hyperparams.epochs + 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate,", "name as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl)", "evaluator) test_perf = eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', 
test_perf[dataset.eval_metric],", "from model import Net from utils.config import process_config, get_args from utils.lr import warm_up_lr", "writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr',", "time def main(): args = get_args() config = process_config(args) print(config) if config.get('seeds') is", "optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in", "eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss,", "'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train') is not None: print(\"Loading", "train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss},", "epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator)", "model.train() loss_all = 0 for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg", "'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch, 'model_state_dict': model.state_dict(), 'criterion_state_dict': criterion.state_dict(),", "return evaluator.eval(input_dict) import time def main(): args = get_args() config = process_config(args) print(config)", 
"train(model, device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...')", "train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. takes dataset name as input evaluator", "+ str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train') is not None: print(\"Loading model", "{} evaluating...\".format(cur_epoch)) train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader,", "x.shape[0] == 1: pass else: with torch.no_grad(): pred = model(bg, x, edge_attr, bases)", "train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP',", "y_pred = [] for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg =", "model import Net from utils.config import process_config, get_args from utils.lr import warm_up_lr def", "train(model, device, loader, optimizer, criterion): model.train() loss_all = 0 for step, (bg, labels)", "import torch.optim as optim import numpy as np from tqdm import tqdm ###", "= filename_header + 'best.tar' else: filename = filename_header + 'curr.tar' print(\"Saving model as", "pass else: with torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true", "import Net from utils.config import process_config, get_args from utils.lr import warm_up_lr def train(model,", "= SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) + '_' \\", "= eval(model, device, valid_loader, evaluator) test_perf = 
eval(model, device, test_loader, evaluator) # print({'Train':", "np.argmax(np.array(valid_curve)) best_train = max(train_curve) else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test:", "model = Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters()) print(f'#Params:", "= sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay)", "config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not", "for p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler =", "\\ + str(config.architecture.hidden) + '_' \\ + str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate)", "1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch", "'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', train_loss, 'lr:', lr)", "torch.no_grad(): pred = model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy()", "else: best_val_epoch = np.argmin(np.array(valid_curve)) best_train = min(train_curve) print('Finished test: {}, Validation: {}, epoch:", "for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device) x =", 
"shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]],", "Net(config.architecture, num_tasks=dataset.num_tasks, num_basis=dataset.graphs[0].edata['bases'].shape[1]).to(device) num_params = sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer", "from utils.lr import warm_up_lr def train(model, device, loader, optimizer, criterion): model.train() loss_all =", "valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf,", "loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP',", "torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr", "input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader =", "device, train_loader, optimizer, criterion) if epoch > config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf", "'criterion_state_dict': criterion.state_dict(), 'optimizer_state_dict': optimizer.state_dict(), 'scheduler_state_dict': 
scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model", "str(config.hyperparams.decay_rate) + '_' \\ + 'B' + str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed)", "criterion): model.train() loss_all = 0 for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")):", "epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch)", "torch.cuda.manual_seed_all(config.seed) device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting", "= eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:',", "= labels == labels loss = criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all +", "print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train", "import torch.nn.functional as F from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import", "ignore nan targets (unlabeled) when computing training loss. 
is_labeled = labels == labels", "step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat')", "+ 1): if epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr']", "epoch <= config.hyperparams.warmup_epochs: warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch))", "checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf", "str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) + '_'", "torch.nn.functional as F from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim", "tqdm import tqdm ### importing OGB from ogb.graphproppred import Evaluator, collate_dgl import sys", "train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP',", "import time def main(): args = get_args() config = process_config(args) print(config) if config.get('seeds')", "x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy()", "optimizer.zero_grad() ## ignore nan targets (unlabeled) when computing training loss. 
is_labeled = labels", "test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val = 0.0", "'_' \\ + str(config.time_stamp) + '_' \\ + str(config.dataset_name) if valid_perf[dataset.eval_metric] > best_val:", "import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import", "writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL',", "= [] test_curve = [] train_curve = [] trainL_curve = [] writer =", "os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train", "'na')) cur_epoch = 0 if config.get('resume_train') is not None: print(\"Loading model from {}...\".format(config.resume_train),", "trainL_curve = [] writer = SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\ +", "if x.shape[0] == 1: pass else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad()", "loss.backward() loss_all = loss_all + loss.item() optimizer.step() return loss_all / len(loader) def eval(model,", "'cpu') ### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx", "valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, 
num_workers=config.num_workers,", "valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename = filename_header + 'curr.tar' print(\"Saving", "= seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed')", "model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate)", "in config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def", "sum(p.numel() for p in model.parameters()) print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler", "pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled) when", "cur_epoch) best_val = 0.0 for epoch in range(cur_epoch + 1, config.hyperparams.epochs + 1):", "def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available(): torch.cuda.manual_seed_all(config.seed)", "weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss()", "enumerate(tqdm(loader, desc=\"Eval iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases", "edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = 
torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict", "\\ + 'B' + str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\ + 'W'", "from utils.config import process_config, get_args from utils.lr import warm_up_lr def train(model, device, loader,", "= torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\": y_true, \"y_pred\": y_pred}", "num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) model = Net(config.architecture, num_tasks=dataset.num_tasks,", "= eval(model, device, test_loader, evaluator) print('Train:', train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:',", "train_perf[dataset.eval_metric], 'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric])", "{}, best loss: {}' .format(test_curve[best_val_epoch], valid_curve[best_val_epoch], best_val_epoch, best_train, min(trainL_curve))) if __name__ == \"__main__\":", "bg.ndata.pop('feat') edge_attr = bg.edata.pop('feat') bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] ==", "if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp)", "as np from tqdm import tqdm ### importing OGB from ogb.graphproppred import Evaluator,", "'_' \\ + str(config.architecture.pooling) + '_' \\ + str(config.architecture.layers) + '_' \\ +", "config.seeds: config.seed = seed config.time_stamp = int(time.time()) print(config) run_with_given_seed(config) else: run_with_given_seed(config) def 
run_with_given_seed(config):", "'_' \\ + str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) + '_' \\ +", "import warm_up_lr def train(model, device, loader, optimizer, criterion): model.train() loss_all = 0 for", "bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: with", "= labels.to(device) if x.shape[0] == 1: pass else: pred = model(bg, x, edge_attr,", "for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x =", "eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation': valid_perf, 'Test': test_perf}) print('Train:', train_perf[dataset.eval_metric],", "bases = bg.edata.pop('bases') labels = labels.to(device) if x.shape[0] == 1: pass else: pred", "y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred, dim=0).numpy() input_dict = {\"y_true\":", "labels.to(device) if x.shape[0] == 1: pass else: pred = model(bg, x, edge_attr, bases)", "\\ + 'I' + str(config.get('identity', 1)) + '_' \\ + str(config.architecture.pooling) + '_'", "filename_header = str(config.commit_id[0:7]) + '_' \\ + str(config.time_stamp) + '_' \\ + str(config.dataset_name)", "from ogbg.mol.utils.filter import filter_train_set from model import Net from utils.config import process_config, get_args", "gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve", "import os import random import torch import torch.nn.functional as F from torch.utils.data import", "num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size, shuffle=False, num_workers=config.num_workers, collate_fn=collate_dgl) test_loader = DataLoader(dataset[split_idx[\"test\"]], 
batch_size=config.hyperparams.batch_size,", "cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch {} evaluating...\".format(cur_epoch)) train_perf =", "= torch.nn.BCEWithLogitsLoss() else: criterion = torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve", "import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model", "checkpoint = torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss =", "sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from ogbg.mol.utils.filter import filter_train_set from model import Net", "\\ + str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate)", "print(config) if config.get('seeds') is not None: for seed in config.seeds: config.seed = seed", "print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic", "print(f'#Params: {num_params}') optimizer = optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if", "> config.hyperparams.warmup_epochs: scheduler.step() # scheduler.step() print('Evaluating...') train_perf = eval(model, device, train_loader, evaluator) valid_perf", "writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) 
writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None:", "train_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(train_loss) writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, epoch) writer.add_scalars('valP', {ts_fk_algo_hp:", "device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) # print({'Train': train_perf, 'Validation':", "'E' + str(config.epsilon) \\ + 'P' + str(config.power) \\ + 'I' + str(config.get('identity',", "scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss'] lr = checkpoint['lr'] print(\"Model loaded.\") print(\"Epoch", "+ str(config.hyperparams.milestones) + '_' \\ + str(config.hyperparams.decay_rate) + '_' \\ + 'B' +", "x.shape[0] == 1: pass else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ##", "takes dataset name as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True,", "'B' + str(config.hyperparams.batch_size) \\ + 'S' + str(config.seed) \\ + 'W' + str(config.get('num_workers',", "train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss},", "'Validation:', valid_perf[dataset.eval_metric], 'Test:', test_perf[dataset.eval_metric], 'Train loss:', cur_loss, 'lr:', lr) train_curve.append(train_perf[dataset.eval_metric]) valid_curve.append(valid_perf[dataset.eval_metric]) test_curve.append(test_perf[dataset.eval_metric]) trainL_curve.append(cur_loss)", "+ 'E' + str(config.epsilon) \\ + 'P' + 
str(config.power) \\ + 'I' +", "SummaryWriter(config.directory) ts_fk_algo_hp = str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) + '_' \\ +", "pass else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets", "labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg = bg.to(device) x = bg.ndata.pop('feat') edge_attr =", "lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss = train(model, device, train_loader, optimizer, criterion)", "criterion = torch.nn.MSELoss() valid_curve = [] test_curve = [] train_curve = [] trainL_curve", "automatic evaluator. takes dataset name as input evaluator = Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]],", "model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred = torch.cat(y_pred,", "epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header = str(config.commit_id[0:7])", "train_perf = eval(model, device, train_loader, evaluator) valid_perf = eval(model, device, valid_loader, evaluator) test_perf", "= optim.AdamW(model.parameters(), lr=config.hyperparams.learning_rate, weight_decay=config.hyperparams.weight_decay) scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type:", "run_with_given_seed(config) def run_with_given_seed(config): if config.get('seed') is not None: random.seed(config.seed) torch.manual_seed(config.seed) np.random.seed(config.seed) if torch.cuda.is_available():", "torch.cuda.is_available() else 'cpu') ### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total:", "if epoch <= config.hyperparams.warmup_epochs: 
warm_up_lr(epoch, config.hyperparams.warmup_epochs, config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {}", "= labels.to(device) if x.shape[0] == 1: pass else: with torch.no_grad(): pred = model(bg,", "split_idx = dataset.get_idx_split() # train_idx = filter_train_set(split_idx[\"train\"], dataset) ### automatic evaluator. takes dataset", "\\ + 'W' + str(config.get('num_workers', 'na')) cur_epoch = 0 if config.get('resume_train') is not", "'scheduler_state_dict': scheduler.state_dict(), 'loss': train_loss, 'lr': lr}, os.path.join(config.checkpoint_dir, filename)) print(\"Model saved.\") writer.close() if 'classification'", "= model(bg, x, edge_attr, bases) y_true.append(labels.view(pred.shape).detach().cpu()) y_pred.append(pred.detach().cpu()) y_true = torch.cat(y_true, dim=0).numpy() y_pred =", "{ts_fk_algo_hp: train_loss}, epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is not None: filename_header", "loader, evaluator): model.eval() y_true = [] y_pred = [] for step, (bg, labels)", "else: pred = model(bg, x, edge_attr, bases) optimizer.zero_grad() ## ignore nan targets (unlabeled)", "\"y_pred\": y_pred} return evaluator.eval(input_dict) import time def main(): args = get_args() config =", "valid_perf = eval(model, device, valid_loader, evaluator) test_perf = eval(model, device, test_loader, evaluator) #", "scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=config.hyperparams.milestones, gamma=config.hyperparams.decay_rate) if \"classification\" in dataset.task_type: criterion = torch.nn.BCEWithLogitsLoss() else:", "y_true = [] y_pred = [] for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Eval", "from torch.utils.data import DataLoader from torch.utils.tensorboard import SummaryWriter import torch.optim as optim import", "test_perf[dataset.eval_metric]}, epoch) writer.add_scalars('traL', {ts_fk_algo_hp: train_loss}, epoch) 
writer.add_scalars('lr', {ts_fk_algo_hp: lr}, epoch) if config.get('checkpoint_dir') is", "loss_all = 0 for step, (bg, labels) in enumerate(tqdm(loader, desc=\"Train iteration\")): bg =", "{ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp: cur_loss}, cur_epoch) writer.add_scalars('lr', {ts_fk_algo_hp: lr}, cur_epoch) best_val =", "saved.\") writer.close() if 'classification' in dataset.task_type: best_val_epoch = np.argmax(np.array(valid_curve)) best_train = max(train_curve) else:", "from ogb.graphproppred import Evaluator, collate_dgl import sys sys.path.append('../..') from ogbg.mol.utils.data_preparation import DglGraphPropPredDatasetAugmentation from", "{ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL', {ts_fk_algo_hp:", "optim import numpy as np from tqdm import tqdm ### importing OGB from", "config.hyperparams.learning_rate, optimizer) lr = scheduler.optimizer.param_groups[0]['lr'] print(\"Epoch {} training...\".format(epoch)) train_loss = train(model, device, train_loader,", "criterion(pred.to(torch.float32)[is_labeled], labels.to(torch.float32)[is_labeled]) loss.backward() loss_all = loss_all + loss.item() optimizer.step() return loss_all / len(loader)", "= torch.load(config.resume_train) model.load_state_dict(checkpoint['model_state_dict']) model.to(device) criterion.load_state_dict(checkpoint['criterion_state_dict']) optimizer.load_state_dict(checkpoint['optimizer_state_dict']) scheduler.load_state_dict(checkpoint['scheduler_state_dict']) cur_epoch = checkpoint['epoch'] cur_loss = checkpoint['loss']", "dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1])) split_idx = 
dataset.get_idx_split() #", "args = get_args() config = process_config(args) print(config) if config.get('seeds') is not None: for", "+ str(config.architecture.dropout) + '_' \\ + str(config.hyperparams.learning_rate) + '_' \\ + str(config.hyperparams.milestones) +", "filename = filename_header + 'curr.tar' print(\"Saving model as {}...\".format(filename), end=' ') torch.save({'epoch': epoch,", "= str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\", "valid_perf[dataset.eval_metric] > best_val: best_val = valid_perf[dataset.eval_metric] filename = filename_header + 'best.tar' else: filename", "writer.add_scalars('traP', {ts_fk_algo_hp: train_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('valP', {ts_fk_algo_hp: valid_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('tstP', {ts_fk_algo_hp: test_perf[dataset.eval_metric]}, cur_epoch) writer.add_scalars('traL',", "nan targets (unlabeled) when computing training loss. is_labeled = labels == labels loss", "str(config.time_stamp) + '_' \\ + str(config.commit_id[0:7]) + '_' \\ + str(config.norm) \\ +", "else 'cpu') ### automatic dataloading and splitting dataset = DglGraphPropPredDatasetAugmentation(name=config.dataset_name) print(\"Bases total: {}\".format(dataset.graphs[0].edata['bases'].shape[1]))", "'_' \\ + str(config.norm) \\ + 'E' + str(config.epsilon) \\ + 'P' +", "{\"y_true\": y_true, \"y_pred\": y_pred} return evaluator.eval(input_dict) import time def main(): args = get_args()", "= Evaluator(config.dataset_name) train_loader = DataLoader(dataset[split_idx[\"train\"]], batch_size=config.hyperparams.batch_size, shuffle=True, num_workers=config.num_workers, collate_fn=collate_dgl) valid_loader = DataLoader(dataset[split_idx[\"valid\"]], batch_size=config.hyperparams.batch_size,", "import numpy as np from tqdm import tqdm ### importing OGB from ogb.graphproppred" ]
[ "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "publish, distribute, sublicense, and/or sell # copies of the Software, and to permit", "software and associated documentation files (the \"Software\"), to deal # in the Software", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "# notice and this permission notice shall be included in all copies or", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "to the following conditions: The above copyright # notice and this permission notice", "portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "IN THE # SOFTWARE. from flask import Flask, render_template, request, jsonify app =", "be included in all copies or # substantial portions of the Software. 
#", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "and to permit persons to whom the Software is # furnished to do", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "the Software without restriction, including without limitation the rights # to use, copy,", "permission notice shall be included in all copies or # substantial portions of", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "without restriction, including without limitation the rights # to use, copy, modify, merge,", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "person obtaining a copy # of this software and associated documentation files (the", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "the Software, and to permit persons to whom the Software is # furnished", "permit persons to whom the Software is # furnished to do so, subject", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "formularios en Javascript # # Copyright 2018 <NAME> <<EMAIL>> # # Permission is", "from flask import Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/') def index():", "Permission is hereby granted, free of charge, to any person obtaining a copy", "de formularios en Javascript # # Copyright 2018 <NAME> <<EMAIL>> # # Permission", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "above copyright # notice and this permission notice shall be included in all", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "in the Software without restriction, including without limitation the rights # to use,", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "Software without restriction, including without limitation the rights # to use, copy, modify,", "The above copyright # notice and this permission notice shall be included in", "Aplicación de validación de formularios en Javascript # # Copyright 2018 <NAME> <<EMAIL>>", "en Javascript # # Copyright 2018 <NAME> <<EMAIL>> # # Permission is hereby", "copyright # notice and this permission notice shall be included in all copies", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to", "OR OTHER DEALINGS IN THE # SOFTWARE. from flask import Flask, render_template, request,", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "copies of the Software, and to permit persons to whom the Software is", "# of this software and associated documentation files (the \"Software\"), to deal #", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "Software is # furnished to do so, subject to the following conditions: The", "is hereby granted, free of charge, to any person obtaining a copy #", "persons to whom the Software is # furnished to do so, subject to", "OTHER DEALINGS IN THE # SOFTWARE. 
from flask import Flask, render_template, request, jsonify", "sell # copies of the Software, and to permit persons to whom the", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "documentation files (the \"Software\"), to deal # in the Software without restriction, including", "to permit persons to whom the Software is # furnished to do so,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "# Aplicación de validación de formularios en Javascript # # Copyright 2018 <NAME>", "DEALINGS IN THE # SOFTWARE. from flask import Flask, render_template, request, jsonify app", "and this permission notice shall be included in all copies or # substantial", "restriction, including without limitation the rights # to use, copy, modify, merge, publish,", "de validación de formularios en Javascript # # Copyright 2018 <NAME> <<EMAIL>> #", "of charge, to any person obtaining a copy # of this software and", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "whom the Software is # furnished to do so, subject to the following", "THE # SOFTWARE. from flask import Flask, render_template, request, jsonify app = Flask(__name__)", "# substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS", "# # Permission is hereby granted, free of charge, to any person obtaining", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "free of charge, to any person obtaining a copy # of this software", "do so, subject to the following conditions: The above copyright # notice and", "app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/', methods=[\"POST\"]) def display(): return", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "shall be included in all copies or # substantial portions of the Software.", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software,", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "and/or sell # copies of the Software, and to permit persons to whom", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "this permission notice shall be included in all copies or # substantial portions", "Copyright 2018 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge,", "# furnished to do so, subject to the following conditions: The above copyright", "jsonify app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/', methods=[\"POST\"]) def display():", "subject to the following conditions: The above copyright # notice and this permission", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT", "without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense,", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "# in the Software without restriction, including without limitation the rights # to", "files (the \"Software\"), to deal # in the Software without restriction, including without", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "the following conditions: The above copyright # notice and this permission notice shall", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "copy # of this software and associated documentation files (the \"Software\"), to deal", "validación de formularios en Javascript # # Copyright 2018 <NAME> <<EMAIL>> # #", "all copies or # substantial portions of the Software. # # THE SOFTWARE", "import Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/') def index(): return render_template('index.html')", "# # Copyright 2018 <NAME> <<EMAIL>> # # Permission is hereby granted, free", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "to deal # in the Software without restriction, including without limitation the rights", "the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "to any person obtaining a copy # of this software and associated documentation", "to do so, subject to the following conditions: The above copyright # notice", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "furnished to do so, subject to the following conditions: The above copyright #", "of the Software, and to permit persons to whom the Software is #", "flask import Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/') def index(): return", "the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "included in all copies or # substantial portions of the Software. # #", "2018 <NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to", "and associated documentation files (the \"Software\"), to deal # in the Software without", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "notice and this permission notice shall be included in all copies or #", "following conditions: The above copyright # notice and this permission notice shall be", "Javascript # # Copyright 2018 <NAME> <<EMAIL>> # # Permission is hereby granted,", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from flask import Flask,", "any person obtaining a copy # of this software and associated documentation files", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "\"Software\"), to deal # in the Software without restriction, including without limitation the", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from flask import", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/',", "so, subject to the following conditions: The above copyright # notice and this", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "sublicense, and/or sell # copies of the Software, and to permit persons to", "a copy # of this software and associated documentation files (the \"Software\"), to", "deal # in the Software without restriction, including without limitation the rights #", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from flask", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "Software, and to permit persons to whom the Software is # furnished to", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "# Copyright 2018 <NAME> <<EMAIL>> # # Permission is hereby granted, free of", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "or # substantial portions of the Software. # # THE SOFTWARE IS PROVIDED", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "including without limitation the rights # to use, copy, modify, merge, publish, distribute,", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "(the \"Software\"), to deal # in the Software without restriction, including without limitation", "this software and associated documentation files (the \"Software\"), to deal # in the", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "in all copies or # substantial portions of the Software. # # THE", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "SOFTWARE. 
from flask import Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/') def", "charge, to any person obtaining a copy # of this software and associated", "associated documentation files (the \"Software\"), to deal # in the Software without restriction,", "conditions: The above copyright # notice and this permission notice shall be included", "<<EMAIL>> # # Permission is hereby granted, free of charge, to any person", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "hereby granted, free of charge, to any person obtaining a copy # of", "of this software and associated documentation files (the \"Software\"), to deal # in", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "granted, free of charge, to any person obtaining a copy # of this", "USE OR OTHER DEALINGS IN THE # SOFTWARE. from flask import Flask, render_template,", "# copies of the Software, and to permit persons to whom the Software", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "request, jsonify app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/', methods=[\"POST\"]) def", "is # furnished to do so, subject to the following conditions: The above", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "<NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any", "to whom the Software is # furnished to do so, subject to the", "limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or", "= Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/', methods=[\"POST\"]) def display(): return jsonify(request.form)", "copies or # substantial portions of the Software. # # THE SOFTWARE IS", "modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "render_template, request, jsonify app = Flask(__name__) @app.route('/') def index(): return render_template('index.html') @app.route('/', methods=[\"POST\"])", "notice shall be included in all copies or # substantial portions of the", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "# Permission is hereby granted, free of charge, to any person obtaining a", "# SOFTWARE. 
from flask import Flask, render_template, request, jsonify app = Flask(__name__) @app.route('/')", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "the Software is # furnished to do so, subject to the following conditions:" ]
[ "in updating-UI state # Update image grid UI img_indices = self.page_img_indices[self.page_index] for i", "# Set images to be shown (in that order) if img_indices == []:", "range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI -", "False # ------------ # UI - image grid # ------------ self.w_imgs = []", "widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) #", "str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update", "img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label", "widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style =", "Note that updating the dropdown label in code (e.g. 
in the update_ui() function)", "for i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button", "images are on what image page # (page == grid of images on", "dataset self.context = context self.grid_size = grid_size # Set images to be shown", "self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e: pass # Init self.bo_updating_ui =", "self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width", "return img_bytes def list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i", "# Helpers # ------------ def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes", "else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update zoom", "w_label.layout.visibility = 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False", "= \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices)", "# ------------ def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D,", "UI elements def create_ui(self): # ------------ # Callbacks # ------------ # Callback for", "image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create all UI elements def", "\" + str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden'", "Layout, IntSlider import io from cvtk import ClassificationDataset, Label #import sys 
#import bqplot,", "widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes =", "img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value =", "into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button)", "label in code (e.g. in the update_ui() function) # also triggers this change", "== 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label)", "1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\"", "grid_size # Set images to be shown (in that order) if img_indices ==", "self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width =", "in range(0, len(list_1D), n)] else: raise Exception('Unknown list split method') return list_2D #", "layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider", "if self.bo_updating_ui is False. 
if obj['type'] == 'change' and obj['name'] == 'value' and", "\"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility =", "range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize", "'420px' # ------------ # UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img,", "img_indices=[]): self.dataset = dataset self.context = context self.grid_size = grid_size # Set images", "grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context = context self.grid_size = grid_size #", "\"image page\" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as", "w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1,", "label dropdown menu def dropdown_changed(obj): # Note that updating the dropdown label in", "w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index)", "pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers # ------------ def w_imread(img_obj, context): img_bytes", "list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels self.label_options", "from ipywidgets import widgets, Layout, IntSlider import io from cvtk import ClassificationDataset, Label", "l in self.labels: self.label_options[l.name] = l # Initialize what images are on what", "= self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property is 
ignored and not accessible", "+ n] for i in range(0, len(list_1D), n)] else: raise Exception('Unknown list split", "= str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' #", "obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button def img_button_pressed(obj): img_index = int(obj.value)", "# ------------ # UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area],", "= \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget", "= list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels", "------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" # should not be", "= grid_size # Set images to be shown (in that order) if img_indices", "\"next images\" or \"previous images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index =", "= sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for", "accessible later in code w_label.description = \"Image \" + str(img_index) w_button.description = \"Zoom\"", "dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px'", "= self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\" or \"previous images\" buttons", "in the update_ui() function) # also triggers this change event. 
Hence need to", "min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for \"image page\" slider def page_slider_changed(obj):", "check if self.bo_updating_ui is False. if obj['type'] == 'change' and obj['name'] == 'value'", "= self.create_ui() # Update / redraw the zoom UI elements def update_zoom_ui(self, img_obj,", "------------------------------------------------ # Class - Image annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init", "self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI", "'500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button,", "create_ui(self): # ------------ # Callbacks # ------------ # Callback for image label dropdown", "is ignored and not accessible later in code w_label.description = \"Image \" +", "\"1\" # should not be necessary but bug on some jupyter versions otherwise", "new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button def img_button_pressed(obj): img_index", "w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0]", "w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" # should not be necessary", "'change' and obj['name'] == 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label =", "w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ #", "Helpers # ------------ def 
w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def", "l in dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for l in self.labels:", "= str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) # Update / redraw all UI", "zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value =", "self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button def img_button_pressed(obj): img_index = int(obj.value) img_obj", "self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1)", "text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom", "img_index) # Callback for \"next images\" or \"previous images\" buttons def page_button_pressed(obj): self.page_index", "img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(',", "UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create", "# ------------ # Callbacks # ------------ # Callback for image label dropdown menu", "UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color", "and obj['name'] == 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value", "img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' 
w_label.layout.visibility = 'visible'", "== 'change' and obj['name'] == 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label", "bug on some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed)", "self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height", "= 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width =", "#sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers # ------------ def w_imread(img_obj,", "'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]],", "be shown (in that order) if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices)", "import randomizeList # ------------ # Helpers # ------------ def w_imread(img_obj, context): img_bytes =", "list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui() # Update", "len(self.page_img_indices) - 1) self.update_ui() # Callback for \"image page\" slider def page_slider_changed(obj): try:", "#random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels self.label_options =", "'200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description=\"Image id: \",", "False. 
if obj['type'] == 'change' and obj['name'] == 'value' and not self.bo_updating_ui: img_index", "or \"previous images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index)", "dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for l in self.labels: self.label_options[l.name] =", "= 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value", "= False # ------------ # UI - image grid # ------------ self.w_imgs =", "from cvtk import ClassificationDataset, Label #import sys #import bqplot, IPython, random #from IPython.display", "def dropdown_changed(obj): # Note that updating the dropdown label in code (e.g. in", "w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ # UI - final", "0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui =", "elements def update_ui(self): self.bo_updating_ui = True # indicate code is in updating-UI state", "Update / redraw the zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value =", "zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" #", "# Update image grid UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] *", "= open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower():", "Exception('Unknown list split method') return list_2D # ------------------------------------------------ # Class - Image annotation", "is in updating-UI state # Update image grid UI img_indices = self.page_img_indices[self.page_index] for", "jupyter versions otherwise w_next_page_button.layout.width = '120px' 
w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous", "Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create all UI", "return list_2D # ------------------------------------------------ # Class - Image annotation UI # ------------------------------------------------- class", "== []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels])", "redraw the zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context)", "2), img_indices=[]): self.dataset = dataset self.context = context self.grid_size = grid_size # Set", "self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header", "img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button", "the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') #", "'\\n') self.w_page_slider.value = str(self.page_index) # Update / redraw all UI elements def update_ui(self):", "in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI", "== grid of images on the right side of the UI) self.page_index =", "self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value =", "False # Create all UI elements def create_ui(self): # ------------ # Callbacks #", "import display 
#from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList", "image label dropdown menu def dropdown_changed(obj): # Note that updating the dropdown label", "[] w_img_label_buttons = [] for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images", "# (page == grid of images on the right side of the UI)", "# Callback for \"zoom\" button def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index]", "{} for l in self.labels: self.label_options[l.name] = l # Initialize what images are", "w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px'", "l # Initialize what images are on what image page # (page ==", "------------ def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D, n,", "context self.grid_size = grid_size # Set images to be shown (in that order)", "# Init object and define instance variables def __init__(self, dataset, context, grid_size=(3, 2),", "w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this", "widget w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1]", "cvtk import ClassificationDataset, Label #import sys #import bqplot, IPython, random #from IPython.display import", "# ------------------------------------------------ # Class - Image annotation UI # ------------------------------------------------- class AnnotationUI(object): #", "self.dataset = dataset self.context = context self.grid_size = grid_size # Set images to", "\"100px\" 
w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button", "i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button =", "Set images to be shown (in that order) if img_indices == []: img_indices", "\"previous images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index", "images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index =", "grid # ------------ self.w_imgs = [] self.w_labels = [] self.w_buttons = [] w_img_label_buttons", "page\" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e:", "try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e: pass # Init self.bo_updating_ui", "#: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) # Update /", "widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox)", "int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\" or \"previous", "= widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px'", "self.w_page_slider.value = str(self.page_index) # Update / redraw all UI elements def update_ui(self): self.bo_updating_ui", "'230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = [] for r in range(self.grid_size[0]):", "------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, 
self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style =", "= 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with actual", "def list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n]", "= False # Create all UI elements def create_ui(self): # ------------ # Callbacks", "= \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) #", "self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area", "description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color", "= widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider])", "- zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\"", "widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px'", "self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ',", "'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value =", "Exception as e: pass # Init self.bo_updating_ui = False # 
------------ # UI", "\"zoom\" button def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) #", "is False. if obj['type'] == 'change' and obj['name'] == 'value' and not self.bo_updating_ui:", "------------ # Callback for image label dropdown menu def dropdown_changed(obj): # Note that", "dropdown menu def dropdown_changed(obj): # Note that updating the dropdown label in code", "i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img)", "context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D, n, method): if method.lower()", "page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) -", "Callback for image label dropdown menu def dropdown_changed(obj): # Note that updating the", "n)] else: raise Exception('Unknown list split method') return list_2D # ------------------------------------------------ # Class", "widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui =", "= [] self.w_labels = [] self.w_buttons = [] w_img_label_buttons = [] for i", "list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for", "on the right side of the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices,", "w_label = self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i]", "= widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ # UI", "self.label_options[l.name] = l 
# Initialize what images are on what image page #", "self.ui = self.create_ui() # Update / redraw the zoom UI elements def update_zoom_ui(self,", "grid UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img =", "code (e.g. in the update_ui() function) # also triggers this change event. Hence", "import widgets, Layout, IntSlider import io from cvtk import ClassificationDataset, Label #import sys", "self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for \"image page\" slider", "grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui() # Update /", "e: pass # Init self.bo_updating_ui = False # ------------ # UI - image", "update_ui() function) # also triggers this change event. Hence need to check if", "self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]),", "obj['name'] == 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index],", "= widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style", "self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for \"image page\"", "\"Image \" + str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility =", "= max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for", "open(img_obj.storage_path, \"rb\").read() return img_bytes def 
list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D", "w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0])", "else: raise Exception('Unknown list split method') return list_2D # ------------------------------------------------ # Class -", "images on the right side of the UI) self.page_index = 0 self.page_img_indices =", "w_label.description = \"Image \" + str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index) else:", "split method') return list_2D # ------------------------------------------------ # Class - Image annotation UI #", "obj['type'] == 'change' and obj['name'] == 'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:])", "- image grid # ------------ self.w_imgs = [] self.w_labels = [] self.w_buttons =", "except Exception as e: pass # Init self.bo_updating_ui = False # ------------ #", "in self.labels: self.label_options[l.name] = l # Initialize what images are on what image", "side of the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1],", "buttons w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning'", "= widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button)", "list_2D = [list_1D[i:i + n] for i in range(0, len(list_1D), n)] else: raise", "'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px'", "updating the dropdown label in code (e.g. 
in the update_ui() function) # also", "= widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes", "import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers", "= 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button,", "ipywidgets import widgets, Layout, IntSlider import io from cvtk import ClassificationDataset, Label #import", "<reponame>Azure-Samples/MachineLearningSamples-AMLVisionPackage-ISICImageClassification<gh_stars>1-10 from ipywidgets import widgets, Layout, IntSlider import io from cvtk import ClassificationDataset,", "what images are on what image page # (page == grid of images", "this property is ignored and not accessible later in code w_label.description = \"Image", "self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider", "= widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image", "state # Update image grid UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0]", "w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = []", "for \"image page\" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception", "i < len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' 
w_button.layout.visibility", "n, method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i", "shown (in that order) if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels", "self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui()", "Update image grid UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]):", "range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding", "* grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui() # Update / redraw", "# combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width =", "range(0, len(list_1D), n)] else: raise Exception('Unknown list split method') return list_2D # ------------------------------------------------", "should not be necessary but bug on some jupyter versions otherwise w_next_page_button.layout.width =", "# ------------ self.w_imgs = [] self.w_labels = [] self.w_buttons = [] w_img_label_buttons =", "'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description =", "this change event. Hence need to check if self.bo_updating_ui is False. if obj['type']", "the update_ui() function) # also triggers this change event. 
Hence need to check", "Callbacks # ------------ # Callback for image label dropdown menu def dropdown_changed(obj): #", "= widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" # should not be necessary but", "w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed)", "(page == grid of images on the right side of the UI) self.page_index", "------------ # Helpers # ------------ def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return", "# UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid])", "IntSlider import io from cvtk import ClassificationDataset, Label #import sys #import bqplot, IPython,", "+ str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility", "self.bo_updating_ui = False # Create all UI elements def create_ui(self): # ------------ #", "to check if self.bo_updating_ui is False. 
if obj['type'] == 'change' and obj['name'] ==", "self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui])", "if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l", "= self.dataset.labels self.label_options = {} for l in self.labels: self.label_options[l.name] = l #", "w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False,", "def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index)", "widgets, Layout, IntSlider import io from cvtk import ClassificationDataset, Label #import sys #import", "function) # also triggers this change event. 
Hence need to check if self.bo_updating_ui", "self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img", "# Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create all", "step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color", "self.create_ui() # Update / redraw the zoom UI elements def update_zoom_ui(self, img_obj, img_index):", "elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #:", "= '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1,", "= \"Image \" + str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility", "code w_label.description = \"Image \" + str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index)", "#w_label.text = str(img_index) # this property is ignored and not accessible later in", "value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color =", "= 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\"", "int(obj['owner'].description[6:]) new_label = obj['owner'].value 
self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button def img_button_pressed(obj):", "UI - image grid # ------------ self.w_imgs = [] self.w_labels = [] self.w_buttons", "w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name],", "= '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = [] for r in", "bqplot, IPython, random #from IPython.display import display #from bqplot import pyplot as bqPyplot", "'120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value", "dropdown label in code (e.g. in the update_ui() function) # also triggers this", "indicate code is in updating-UI state # Update image grid UI img_indices =", "= self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility", "self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility =", "Class - Image annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init object and", "random #from IPython.display import display #from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from", "self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if i <", "w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index)", "self.context = context self.grid_size = grid_size # 
Set images to be shown (in", "== 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i in range(0, len(list_1D), n)]", "import io from cvtk import ClassificationDataset, Label #import sys #import bqplot, IPython, random", "{}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) # Update / redraw", "self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback", "str(img_index) w_button.description = \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility =", "self.labels = self.dataset.labels self.label_options = {} for l in self.labels: self.label_options[l.name] = l", "need to check if self.bo_updating_ui is False. if obj['type'] == 'change' and obj['name']", "for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------", "IPython.display import display #from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import", "UI # ------------------------------------------------- class AnnotationUI(object): # Init object and define instance variables def", "widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next", "# Update / redraw the zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value", "'500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width =", "image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' 
w_img_label_buttons.append(w_img_label_button) #", "= int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\" or", "image page # (page == grid of images on the right side of", "# Update / redraw all UI elements def update_ui(self): self.bo_updating_ui = True #", "\"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px'", "[]: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels]) self.labels", "# Callback for image label dropdown menu def dropdown_changed(obj): # Note that updating", "# Create UI self.ui = self.create_ui() # Update / redraw the zoom UI", "max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width", "len(list_1D), n)] else: raise Exception('Unknown list split method') return list_2D # ------------------------------------------------ #", "dropdown_changed(obj): # Note that updating the dropdown label in code (e.g. in the", "True # indicate code is in updating-UI state # Update image grid UI", "but bug on some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary'", "# Note that updating the dropdown label in code (e.g. 
in the update_ui()", "Label #import sys #import bqplot, IPython, random #from IPython.display import display #from bqplot", "self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj,", "Image grid widget w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r", "= dataset self.context = context self.grid_size = grid_size # Set images to be", "Initialize what images are on what image page # (page == grid of", "'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:')", "self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange'", "img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D, n, method): if method.lower() ==", "self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property is ignored and not accessible later", "img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\"", "width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image", "# Callbacks # ------------ # Callback for image label dropdown menu def dropdown_changed(obj):", "'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text =", "Init object and define instance variables def __init__(self, dataset, context, grid_size=(3, 2), 
img_indices=[]):", "# Callback for \"image page\" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui()", "w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden'", "#from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers # ------------ def w_imread(img_obj, context):", "img_bytes def list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i +", "for image label dropdown menu def dropdown_changed(obj): # Note that updating the dropdown", "UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i]", "'120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index,", "w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed)", "for l in self.labels: self.label_options[l.name] = l # Initialize what images are on", "UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image", "names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width", "annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init object and define instance variables", "on some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) 
w_previous_page_button", "images\" or \"previous images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0,", "# self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ # UI - final # ------------", "Create all UI elements def create_ui(self): # ------------ # Callbacks # ------------ #", "= list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui() #", "images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options,", "define instance variables def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset", "= '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description=\"Image id:", "= 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text", "self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0,", "annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with", "Initialize images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label =", "on what image page # (page == grid of images on the right", "= widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img =", "w_button.description = \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden'", 
"for \"zoom\" button def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index)", "annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid'", "\"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) # Update", "', '\\n') self.w_page_slider.value = str(self.page_index) # Update / redraw all UI elements def", "import ClassificationDataset, Label #import sys #import bqplot, IPython, random #from IPython.display import display", "class AnnotationUI(object): # Init object and define instance variables def __init__(self, dataset, context,", "= 'hidden' w_label.layout.visibility = 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui", "img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility", "new_label) # Callback for \"zoom\" button def img_button_pressed(obj): img_index = int(obj.value) img_obj =", "page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e: pass # Init", "combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px'", "'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with actual images", "self.bo_updating_ui = True # indicate code is in updating-UI state # Update image", "r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in", "str(self.page_index) # Update / redraw all UI 
elements def update_ui(self): self.bo_updating_ui = True", "int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() #", "if obj['type'] == 'change' and obj['name'] == 'value' and not self.bo_updating_ui: img_index =", "* self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid", "[] for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200,", "w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox", "'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width", "= '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue'))", "= self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i] img_obj", "continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white'", "triggers this change event. Hence need to check if self.bo_updating_ui is False. 
if", "'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI", "update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value", "list_2D # ------------------------------------------------ # Class - Image annotation UI # ------------------------------------------------- class AnnotationUI(object):", "= w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n')", "w_img_label_buttons = [] for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img", "UI elements def update_ui(self): self.bo_updating_ui = True # indicate code is in updating-UI", "for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c", "for \"next images\" or \"previous images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index", "page # (page == grid of images on the right side of the", "w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context)", "self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width", "img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in", "[] self.w_buttons = [] w_img_label_buttons = [] for i in range(self.grid_size[0] * self.grid_size[1]):", "def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices)", "img_indices = 
list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for l in dataset.labels]) self.labels =", "* self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if i", "= '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window", "value=\"1\") w_next_page_button.value = \"1\" # should not be necessary but bug on some", "# indicate code is in updating-UI state # Update image grid UI img_indices", "0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons", "self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices): img_index =", "in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i]", "Callback for \"image page\" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except", "# ------------ # UI - zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next images\",", "= '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' #", "# Callback for \"next images\" or \"previous images\" buttons def page_button_pressed(obj): self.page_index +=", "= 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image", "# Init self.bo_updating_ui = False # ------------ # UI - image grid #", "= widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' 
annotation_ui.layout.border_style = 'solid' tabs_ui", "self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) # Update / redraw all", "int(obj['new']['value']) self.update_ui() except Exception as e: pass # Init self.bo_updating_ui = False #", "# ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" # should not", "not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for", "Callback for \"next images\" or \"previous images\" buttons def page_button_pressed(obj): self.page_index += int(obj.value)", "n] for i in range(0, len(list_1D), n)] else: raise Exception('Unknown list split method')", "as e: pass # Init self.bo_updating_ui = False # ------------ # UI -", "= IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header =", "widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid", "self.dataset.labels self.label_options = {} for l in self.labels: self.label_options[l.name] = l # Initialize", "+ c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes)", "method): if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i in", "method') return list_2D # ------------------------------------------------ # Class - Image annotation UI # -------------------------------------------------", "that updating the dropdown label in code (e.g. 
in the update_ui() function) #", "all UI elements def create_ui(self): # ------------ # Callbacks # ------------ # Callback", "'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea()", "w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description=\"Image", "w_next_page_button.value = \"1\" # should not be necessary but bug on some jupyter", "for l in dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for l in", "#import bqplot, IPython, random #from IPython.display import display #from bqplot import pyplot as", "# Class - Image annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init object", "w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property is", "= 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False #", "what image page # (page == grid of images on the right side", "w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label])", "= [] for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img =", "'hidden' w_label.layout.visibility = 'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui =", "# this property is ignored and not accessible later in code w_label.description =", "= [] self.w_buttons = [] w_img_label_buttons = [] for i in range(self.grid_size[0] *", "w_imread(img_obj, self.context) self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = 
str(img_obj).replace(', ', '\\n') self.w_page_slider.value", "'value' and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) #", "[list_1D[i:i + n] for i in range(0, len(list_1D), n)] else: raise Exception('Unknown list", "that order) if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name", "= [] w_img_label_buttons = [] for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize", "final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black'", "------------------------------------------------- class AnnotationUI(object): # Init object and define instance variables def __init__(self, dataset,", "redraw all UI elements def update_ui(self): self.bo_updating_ui = True # indicate code is", "str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index) # Update / redraw all UI elements", "display #from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList #", "for i in range(0, len(list_1D), n)] else: raise Exception('Unknown list split method') return", "w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0,", "= w_imread(img_obj, self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) #", "img_indices[0]) self.bo_updating_ui = False # Create all UI elements def create_ui(self): # ------------", "be necessary 
but bug on some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style", "self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i] img_obj =", "self.w_imgs = [] self.w_labels = [] self.w_buttons = [] w_img_label_buttons = [] for", "#import sys #import bqplot, IPython, random #from IPython.display import display #from bqplot import", "self.update_ui() # Callback for \"image page\" slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value'])", "Update / redraw all UI elements def update_ui(self): self.bo_updating_ui = True # indicate", "= int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button def", "self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\" or \"previous images\" buttons def page_button_pressed(obj):", "slider def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e: pass", "as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers # ------------", "elements def create_ui(self): # ------------ # Callbacks # ------------ # Callback for image", "widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" # should not be necessary but bug", "= img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility =", "Callback for \"zoom\" button def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj,", "value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = 
'120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed)", "def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D, n, method):", "self.update_ui() except Exception as e: pass # Init self.bo_updating_ui = False # ------------", "img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value", "(e.g. in the update_ui() function) # also triggers this change event. Hence need", "menu def dropdown_changed(obj): # Note that updating the dropdown label in code (e.g.", "------------ # UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520),", "= self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value =", "= \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width =", "description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button", "Create UI self.ui = self.create_ui() # Update / redraw the zoom UI elements", "io from cvtk import ClassificationDataset, Label #import sys #import bqplot, IPython, random #from", "list split method') return list_2D # ------------------------------------------------ # Class - Image annotation UI", "= [list_1D[i:i + n] for i in range(0, len(list_1D), n)] else: raise Exception('Unknown", "w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices):", "w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' 
w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white',", "Image annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init object and define instance", "context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context = context self.grid_size = grid_size", "self.w_buttons = [] w_img_label_buttons = [] for i in range(self.grid_size[0] * self.grid_size[1]): #", "property is ignored and not accessible later in code w_label.description = \"Image \"", "in code (e.g. in the update_ui() function) # also triggers this change event.", "w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] +", "button def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback", "- final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color =", "'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img,", "background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider =", "IPython, random #from IPython.display import display #from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\")", "widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) #", 
"self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\"", "self.w_zoom_header.value = \"Image #: {}\".format(img_index) self.w_zoom_text_area.value = str(img_obj).replace(', ', '\\n') self.w_page_slider.value = str(self.page_index)", "'100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------", "are on what image page # (page == grid of images on the", "len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility = 'visible'", "for i in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description=\"\")", "= int(obj['new']['value']) self.update_ui() except Exception as e: pass # Init self.bo_updating_ui = False", "------------ # UI - image grid # ------------ self.w_imgs = [] self.w_labels =", "w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window # ------------", "def create_ui(self): # ------------ # Callbacks # ------------ # Callback for image label", "also triggers this change event. 
Hence need to check if self.bo_updating_ui is False.", "# Initialize zoom buttons w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\"", "# ------------ # Callback for image label dropdown menu def dropdown_changed(obj): # Note", "w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button = widgets.Button(description=\"Image id: \", value=\"\")", "(in that order) if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels =", "later in code w_label.description = \"Image \" + str(img_index) w_button.description = \"Zoom\" w_button.value", "Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width =", "1) self.update_ui() # Callback for \"image page\" slider def page_slider_changed(obj): try: self.page_index =", "Hence need to check if self.bo_updating_ui is False. 
if obj['type'] == 'change' and", "* self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown", "widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ # UI -", "# Create all UI elements def create_ui(self): # ------------ # Callbacks # ------------", "not accessible later in code w_label.description = \"Image \" + str(img_index) w_button.description =", "AnnotationUI(object): # Init object and define instance variables def __init__(self, dataset, context, grid_size=(3,", "= \"Zoom\" w_button.value = str(img_index) else: w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility", "order) if img_indices == []: img_indices = list(range(len(dataset.images))) #random.shuffle(img_indices) ##############self.labels = sorted([l.name for", "def page_slider_changed(obj): try: self.page_index = int(obj['new']['value']) self.update_ui() except Exception as e: pass #", "w_zoom_button_slider.layout.width = '420px' # ------------ # UI - final # ------------ annotation_ui =", "ClassificationDataset, Label #import sys #import bqplot, IPython, random #from IPython.display import display #from", "images to be shown (in that order) if img_indices == []: img_indices =", "0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize zoom buttons w_button =", "object and define instance variables def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset", "the dropdown label in code (e.g. 
in the update_ui() function) # also triggers", "annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') #", "= \"1\" # should not be necessary but bug on some jupyter versions", "# Image grid widget w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox =", "/ redraw the zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj,", "= {} for l in self.labels: self.label_options[l.name] = l # Initialize what images", "the right side of the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0]", "= str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property is ignored", "Init self.bo_updating_ui = False # ------------ # UI - image grid # ------------", "# ------------ # UI - image grid # ------------ self.w_imgs = [] self.w_labels", "grid widget w_grid_HBoxes = [] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r *", "in code w_label.description = \"Image \" + str(img_index) w_button.description = \"Zoom\" w_button.value =", "self.bo_updating_ui is False. 
if obj['type'] == 'change' and obj['name'] == 'value' and not", "UI self.ui = self.create_ui() # Update / redraw the zoom UI elements def", "= obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback for \"zoom\" button def img_button_pressed(obj): img_index =", "'10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window #", "self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus", "max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for \"image", "not be necessary but bug on some jupyter versions otherwise w_next_page_button.layout.width = '120px'", "the zoom UI elements def update_zoom_ui(self, img_obj, img_index): self.w_zoom_img.value = w_imread(img_obj, self.context) self.w_zoom_header.value", "w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation')", "grid_size[1], method='fillFirst') # Create UI self.ui = self.create_ui() # Update / redraw the", "= self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if i < len(img_indices): img_index", "= 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst') # Create UI self.ui", "= self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label =", "Initialize zoom buttons w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style", "w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window # ------------ w_next_page_button", "str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) 
# this property is ignored and", "UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create all UI elements def create_ui(self):", "__init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context = context self.grid_size", "# UI - image grid # ------------ self.w_imgs = [] self.w_labels = []", "method='fillFirst') # Create UI self.ui = self.create_ui() # Update / redraw the zoom", "def img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for", "sys #import bqplot, IPython, random #from IPython.display import display #from bqplot import pyplot", "img_button_pressed(obj): img_index = int(obj.value) img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next", "to be shown (in that order) if img_indices == []: img_indices = list(range(len(dataset.images)))", "self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header", "in dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for l in self.labels: self.label_options[l.name]", "change event. Hence need to check if self.bo_updating_ui is False. if obj['type'] ==", "w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = [] for", "# also triggers this change event. 
Hence need to check if self.bo_updating_ui is", "dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context = context self.grid_size =", "updating-UI state # Update image grid UI img_indices = self.page_img_indices[self.page_index] for i in", "in range(self.grid_size[0] * self.grid_size[1]): # Initialize images w_img = widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) #", "[] self.w_labels = [] self.w_buttons = [] w_img_label_buttons = [] for i in", "pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers #", "= widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label)", "tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with actual images self.update_ui()", "w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid", "- 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width =", "method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i in range(0, len(list_1D),", "w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value =", "= widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' 
self.w_zoom_text_area.layout.height =", "widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) #", "bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------ #", "[] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for", "= [] for r in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c]", "if i < len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible'", "self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\" or \"previous images\" buttons def", "zoom buttons w_button = widgets.Button(description=\"Image id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style =", "= l # Initialize what images are on what image page # (page", "code is in updating-UI state # Update image grid UI img_indices = self.page_img_indices[self.page_index]", "pass # Init self.bo_updating_ui = False # ------------ # UI - image grid", "- Image annotation UI # ------------------------------------------------- class AnnotationUI(object): # Init object and define", "images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary'", "right side of the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] *", "/ redraw all UI elements def update_ui(self): self.bo_updating_ui = True # indicate code", "# ------------ # Helpers # ------------ def w_imread(img_obj, context): 
img_bytes = open(img_obj.storage_path, \"rb\").read()", "of images on the right side of the UI) self.page_index = 0 self.page_img_indices", "widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image()", "= str(self.page_index) # Update / redraw all UI elements def update_ui(self): self.bo_updating_ui =", "description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image", "grid widget w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image", "= 'black' annotation_ui.layout.border_style = 'solid' tabs_ui = widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update", "zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create all UI elements", "#from IPython.display import display #from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2", "w_button = self.w_buttons[i] if i < len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index]", "images\", value=\"1\") w_next_page_button.value = \"1\" # should not be necessary but bug on", "= '500px' self.w_zoom_text_area = widgets.Textarea() self.w_zoom_text_area.layout.width = '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider =", "range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i] w_button = self.w_buttons[i] if", "self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ # UI - final #", "def update_ui(self): 
self.bo_updating_ui = True # indicate code is in updating-UI state #", "update_ui(self): self.bo_updating_ui = True # indicate code is in updating-UI state # Update", "image grid # ------------ self.w_imgs = [] self.w_labels = [] self.w_buttons = []", "hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom", "if method.lower() == 'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i in range(0,", "c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) #", "versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\",", "'fillFirst'.lower(): list_2D = [list_1D[i:i + n] for i in range(0, len(list_1D), n)] else:", "img_obj = self.dataset.images[img_index] self.update_zoom_ui(img_obj, img_index) # Callback for \"next images\" or \"previous images\"", "w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\", layout=Layout(color='white', background_color='lightblue')) w_previous_page_button.value = \"-1\" w_previous_page_button.layout.width =", "self.w_labels = [] self.w_buttons = [] w_img_label_buttons = [] for i in range(self.grid_size[0]", "bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------ # Helpers # ------------ def", "= 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update zoom image UI", "= 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width = '500px' self.w_zoom_text_area =", "self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) 
self.bo_updating_ui = False # Create all UI elements def create_ui(self): #", "w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget w_grid_HBoxes = [] for r", "< len(img_indices): img_index = img_indices[i] img_obj = self.dataset.images[img_index] w_img.layout.visibility = 'visible' w_button.layout.visibility =", "\"rb\").read() return img_bytes def list_split(list_1D, n, method): if method.lower() == 'fillFirst'.lower(): list_2D =", "widgets.Image(width=200, description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\",", "self.context) w_img.description = str(img_index) w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property", "# ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider, self.w_zoom_img, self.w_zoom_text_area], width=520), w_img_grid]) annotation_ui.layout.border_color = 'black' annotation_ui.layout.border_style", "some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button =", "c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid = widgets.VBox(w_grid_HBoxes) # ------------ #", "UI - zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value =", "= '500px' self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width", "of the UI) self.page_index = 0 self.page_img_indices = list_split(img_indices, grid_size[0] * grid_size[1], method='fillFirst')", "grid of images on the right side of 
the UI) self.page_index = 0", "self.w_zoom_text_area.layout.height = '100px' w_zoom_button_slider = widgets.HBox([widgets.VBox([w_next_page_button, w_previous_page_button]), self.w_page_slider]) # self.w_zoom_header w_zoom_button_slider.layout.width = '420px'", "self.grid_size = grid_size # Set images to be shown (in that order) if", "= 'visible' w_button.layout.visibility = 'visible' w_label.layout.visibility = 'visible' w_img.value = w_imread(img_obj, self.context) w_img.description", "otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style = 'primary' w_next_page_button.on_click(page_button_pressed) w_previous_page_button = widgets.Button(description=\"Previous images\", value=\"-1\",", "##############self.labels = sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels self.label_options = {}", "self.label_options = {} for l in self.labels: self.label_options[l.name] = l # Initialize what", "value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value') self.w_labels.append(w_label) # Initialize", "self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img = self.w_imgs[i] w_label = self.w_labels[i]", "\"-1\" w_previous_page_button.layout.width = '120px' w_previous_page_button.button_style = 'primary' w_previous_page_button.on_click(page_button_pressed) self.w_page_slider = IntSlider(min=0, max=len(self.page_img_indices) -", "id: \", value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine", "widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with actual images self.update_ui() return (tabs_ui)", "= str(img_index) # this property is ignored and not accessible later in code", "# Initialize images w_img = widgets.Image(width=200, 
description=\"\") self.w_imgs.append(w_img) # Initialize dropdown menus w_label", "# ------------------------------------------------- class AnnotationUI(object): # Init object and define instance variables def __init__(self,", "image grid UI img_indices = self.page_img_indices[self.page_index] for i in range(self.grid_size[0] * self.grid_size[1]): w_img", "# should not be necessary but bug on some jupyter versions otherwise w_next_page_button.layout.width", "# Initialize what images are on what image page # (page == grid", "w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image grid widget w_img_label_button =", "= '420px' # ------------ # UI - final # ------------ annotation_ui = widgets.HBox(children=[widgets.VBox(children=[w_zoom_button_slider,", "self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color = 'orange' self.w_zoom_img = widgets.Image() self.w_zoom_img.layout.width", "page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\") self.w_zoom_header.layout.width = \"100px\" self.w_zoom_header.layout.color = 'white' self.w_zoom_header.layout.background_color =", "w_img.layout.visibility = 'hidden' w_button.layout.visibility = 'hidden' w_label.layout.visibility = 'hidden' # Update zoom image", "= context self.grid_size = grid_size # Set images to be shown (in that", "and not accessible later in code w_label.description = \"Image \" + str(img_index) w_button.description", "self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding = '10px' w_grid_HBoxes.append(hbox) w_img_grid =", "= widgets.Tab(children=[annotation_ui]) tabs_ui.set_title(0, 'Image Annotation') # Update UI with actual images self.update_ui() return", "self.w_zoom_header w_zoom_button_slider.layout.width = '420px' # ------------ # UI - final # ------------ annotation_ui", "self.bo_updating_ui = 
False # ------------ # UI - image grid # ------------ self.w_imgs", "= min(self.page_index, len(self.page_img_indices) - 1) self.update_ui() # Callback for \"image page\" slider def", "ignored and not accessible later in code w_label.description = \"Image \" + str(img_index)", "w_img_label_button = widgets.VBox(children=[w_button, w_img, w_label]) w_img_label_button.width = '230px' w_img_label_buttons.append(w_img_label_button) # Image grid widget", "i in range(0, len(list_1D), n)] else: raise Exception('Unknown list split method') return list_2D", "instance variables def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context", "and not self.bo_updating_ui: img_index = int(obj['owner'].description[6:]) new_label = obj['owner'].value self.dataset.change_label_for_image(self.dataset.images[img_index], new_label) # Callback", "value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into image", "hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in range(self.grid_size[1])]) hbox.layout.padding =", "w_label.value = self.dataset.get_labels_for_image(img_obj)[0] #w_label.text = str(img_index) # this property is ignored and not", "self.labels: self.label_options[l.name] = l # Initialize what images are on what image page", "randomizeList # ------------ # Helpers # ------------ def w_imread(img_obj, context): img_bytes = open(img_obj.storage_path,", "raise Exception('Unknown list split method') return list_2D # ------------------------------------------------ # Class - Image", "menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed,", "str(img_index) # this property is ignored and not accessible later in code w_label.description", "- 
1) self.update_ui() # Callback for \"image page\" slider def page_slider_changed(obj): try: self.page_index", "and define instance variables def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset =", "def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context = context", "# UI - zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value", "'hidden' # Update zoom image UI self.update_zoom_ui(self.dataset.images[img_indices[0]], img_indices[0]) self.bo_updating_ui = False # Create", "#from bqplot import pyplot as bqPyplot #sys.path.append(\"C:\\\\Users\\\\pabuehle\\\\Desktop\\\\PROJECTS\\\\pythonLibrary\") #from pabuehle_utilities_general_v2 import randomizeList # ------------", "variables def __init__(self, dataset, context, grid_size=(3, 2), img_indices=[]): self.dataset = dataset self.context =", "------------ self.w_imgs = [] self.w_labels = [] self.w_buttons = [] w_img_label_buttons = []", "event. Hence need to check if self.bo_updating_ui is False. 
if obj['type'] == 'change'", "self.w_imgs.append(w_img) # Initialize dropdown menus w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\")", "+= int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index, len(self.page_img_indices) - 1) self.update_ui()", "= widgets.VBox(w_grid_HBoxes) # ------------ # UI - zoom window # ------------ w_next_page_button =", "w_label = widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width = '200px' w_label.observe(dropdown_changed, names='value')", "------------ # Callbacks # ------------ # Callback for image label dropdown menu def", "window # ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\") w_next_page_button.value = \"1\" # should", "in range(self.grid_size[0]): hbox = widgets.HBox(children=[w_img_label_buttons[r * self.grid_size[1] + c] for c in range(self.grid_size[1])])", "necessary but bug on some jupyter versions otherwise w_next_page_button.layout.width = '120px' w_next_page_button.button_style =", "w_imread(img_obj, context): img_bytes = open(img_obj.storage_path, \"rb\").read() return img_bytes def list_split(list_1D, n, method): if", "buttons def page_button_pressed(obj): self.page_index += int(obj.value) self.page_index = max(0, self.page_index) self.page_index = min(self.page_index,", "all UI elements def update_ui(self): self.bo_updating_ui = True # indicate code is in", "sorted([l.name for l in dataset.labels]) self.labels = self.dataset.labels self.label_options = {} for l", "= True # indicate code is in updating-UI state # Update image grid", "\", value=\"\") w_button.layout.width = \"100px\" w_button.button_style = 'warning' w_button.on_click(img_button_pressed) self.w_buttons.append(w_button) # combine into", "# Initialize dropdown menus w_label = 
widgets.Dropdown(options=self.label_options, value=self.label_options[self.labels[0].name], text=\"Image 0\", description=\"Image 0\") w_label.layout.width", "IntSlider(min=0, max=len(self.page_img_indices) - 1, step=1, value=self.page_index, continuous_update=False, description='Image page:') self.w_page_slider.observe(page_slider_changed) self.w_zoom_header = widgets.Text(\"\")", "------------ # UI - zoom window # ------------ w_next_page_button = widgets.Button(description=\"Next images\", value=\"1\")" ]
[ "script import sys print(sys.platform) print(2 ** 100) x = 'Spam!' print(x * 8)", "<reponame>caoghui/python<gh_stars>0 #A first python script import sys print(sys.platform) print(2 ** 100) x =", "#A first python script import sys print(sys.platform) print(2 ** 100) x = 'Spam!'", "print(sys.platform) print(2 ** 100) x = 'Spam!' print(x * 8) input('Press Enter to", "python script import sys print(sys.platform) print(2 ** 100) x = 'Spam!' print(x *", "sys print(sys.platform) print(2 ** 100) x = 'Spam!' print(x * 8) input('Press Enter", "import sys print(sys.platform) print(2 ** 100) x = 'Spam!' print(x * 8) input('Press", "print(2 ** 100) x = 'Spam!' print(x * 8) input('Press Enter to exit')", "first python script import sys print(sys.platform) print(2 ** 100) x = 'Spam!' print(x" ]
[ "exception that triggered the exit traceback: Traceback when exit was triggered ''' #", "content to the file open(path, 'w') needs to be called prior to calling", "to the file open(path, 'w') needs to be called prior to calling this", "prior to calling this function, typically by ````with open(file, 'w') as f: self.write_fn(f)````", "content if index < len(lines) - 1: self._path.write('\\n') self._start_of_line = True # If", "If the content should end in a newline, write it if end_in_newline: self._path.write('\\n')", "= True def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): ''' Args:", "function for further explanation. ''' self._indent_level += 1 return self def dedent(self): '''Dedent", "content should end in a newline, write it if end_in_newline: self._path.write('\\n') self._start_of_line =", "exit exception_value: Value of exception that triggered the exit traceback: Traceback when exit", "while writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent the writer by one level", "and path to write to ''' self._path = path self._indent_level = 0 self._start_of_line", "class. See documentation on the write() function for further explanation. 
''' self._indent_level +=", "def dedent(self): '''Dedent the writer by one level To be used in a", "formatting''' def __init__(self, path): ''' Args: path (handle): File name and path to", "+= 1 return self def dedent(self): '''Dedent the writer by one level To", "as w: w.write('string to write') w.write(self.string_to_write)```` Args: content (str): Content to write, as", "'''Writer used to create source files with consistent formatting''' def __init__(self, path): '''", "if self._indent_level > 0: self._indent_level -= 1 return self def write(self, content='', end_in_newline=True):", "used to create source files with consistent formatting''' def __init__(self, path): ''' Args:", "Type of exception that triggered the exit exception_value: Value of exception that triggered", "a line if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the line self._path.write(line) #", "explanation. ''' self._indent_level += 1 return self def dedent(self): '''Dedent the writer by", "exit traceback: Traceback when exit was triggered ''' # Clear the path if", "Traceback when exit was triggered ''' # Clear the path if an uncaught", "write(self, content='', end_in_newline=True): ''' Write content to the file open(path, 'w') needs to", "of exception that triggered the exit traceback: Traceback when exit was triggered '''", "calling this function, typically by ````with open(file, 'w') as f: self.write_fn(f)```` where `self`", "````with open(file, 'w') as f: self.write_fn(f)```` where `self` is a higher level object", "`self` is a higher level object and `write_fn(self, file)` would look something like", "write a newline at the end Default is True. 
''' lines = inspect.cleandoc(content).splitlines()", "w: w.write('string to write') w.write(self.string_to_write)```` Args: content (str): Content to write, as a", "level To be used in a similiar fashion to the write() function in", "inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): # Indent if the start of a", "# Clear the path if an uncaught exception occured while writing: if exception_type:", "consistent formatting''' def __init__(self, path): ''' Args: path (handle): File name and path", "one level To be used in a similiar fashion to the write() function", "`inspect.cleandoc()` end_in_newline (bool): Whether or not to write a newline at the end", "still more content if index < len(lines) - 1: self._path.write('\\n') self._start_of_line = True", "string Content is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not to", "the line self._path.write(line) # Write a new line if there's still more content", "exception occured while writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent the writer by", "_INDENT = ' ' * _INDENT_LEVEL class _Writer(object): '''Writer used to create source", "to write, as a string Content is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool):", "self.write_fn(f)```` where `self` is a higher level object and `write_fn(self, file)` would look", "fashion to the write() function in this class. 
See documentation on the write()", "cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not to write a newline", "if there's still more content if index < len(lines) - 1: self._path.write('\\n') self._start_of_line", "path self._indent_level = 0 self._start_of_line = True def __enter__(self): return self def __exit__(self,", "_INDENT_LEVEL class _Writer(object): '''Writer used to create source files with consistent formatting''' def", "= 2 _INDENT = ' ' * _INDENT_LEVEL class _Writer(object): '''Writer used to", "a string Content is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not", "write() function for further explanation. ''' if self._indent_level > 0: self._indent_level -= 1", "Default is True. ''' lines = inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): #", "self._start_of_line = True def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): '''", "Args: path (handle): File name and path to write to ''' self._path =", "````def _write_html(self, file): with _Writer(file) as w: w.write('string to write') w.write(self.string_to_write)```` Args: content", "__init__(self, path): ''' Args: path (handle): File name and path to write to", "> 0: self._indent_level -= 1 return self def write(self, content='', end_in_newline=True): ''' Write", "exception_type: Type of exception that triggered the exit exception_value: Value of exception that", "self._path = path self._indent_level = 0 self._start_of_line = True def __enter__(self): return self", "def __exit__(self, exception_type, exception_value, traceback): ''' Args: exception_type: Type of exception that triggered", "triggered the exit traceback: Traceback when exit was triggered ''' # Clear the", "further explanation. 
''' self._indent_level += 1 return self def dedent(self): '''Dedent the writer", "-= 1 return self def write(self, content='', end_in_newline=True): ''' Write content to the", "True. ''' lines = inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): # Indent if", "level object and `write_fn(self, file)` would look something like ````def _write_html(self, file): with", "that triggered the exit exception_value: Value of exception that triggered the exit traceback:", "self def __exit__(self, exception_type, exception_value, traceback): ''' Args: exception_type: Type of exception that", "f: self.write_fn(f)```` where `self` is a higher level object and `write_fn(self, file)` would", "return self def write(self, content='', end_in_newline=True): ''' Write content to the file open(path,", "end_in_newline=True): ''' Write content to the file open(path, 'w') needs to be called", "traceback: Traceback when exit was triggered ''' # Clear the path if an", "the path if an uncaught exception occured while writing: if exception_type: self._path.truncate(0) def", "return self def __exit__(self, exception_type, exception_value, traceback): ''' Args: exception_type: Type of exception", "level for writer _INDENT_LEVEL = 2 _INDENT = ' ' * _INDENT_LEVEL class", "was triggered ''' # Clear the path if an uncaught exception occured while", "' ' * _INDENT_LEVEL class _Writer(object): '''Writer used to create source files with", "by one level To be used in a similiar fashion to the write()", "used in a similiar fashion to the write() function in this class. See", "path to write to ''' self._path = path self._indent_level = 0 self._start_of_line =", "the write() function for further explanation. 
''' if self._indent_level > 0: self._indent_level -=", "with _Writer(file) as w: w.write('string to write') w.write(self.string_to_write)```` Args: content (str): Content to", "newline, write it if end_in_newline: self._path.write('\\n') self._start_of_line = True else: self._start_of_line = False", "uncaught exception occured while writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent the writer", "the writer by one level To be used in a similiar fashion to", "in this class. See documentation on the write() function for further explanation. '''", "'w') as f: self.write_fn(f)```` where `self` is a higher level object and `write_fn(self,", "would look something like ````def _write_html(self, file): with _Writer(file) as w: w.write('string to", "where `self` is a higher level object and `write_fn(self, file)` would look something", "a newline at the end Default is True. ''' lines = inspect.cleandoc(content).splitlines() for", "function, typically by ````with open(file, 'w') as f: self.write_fn(f)```` where `self` is a", "class. See documentation on the write() function for further explanation. ''' if self._indent_level", "line if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the line self._path.write(line) # Write", "See documentation on the write() function for further explanation. ''' self._indent_level += 1", "write to ''' self._path = path self._indent_level = 0 self._start_of_line = True def", "name and path to write to ''' self._path = path self._indent_level = 0", "typically by ````with open(file, 'w') as f: self.write_fn(f)```` where `self` is a higher", "this class. See documentation on the write() function for further explanation. ''' if", "for further explanation. 
''' if self._indent_level > 0: self._indent_level -= 1 return self", "' * _INDENT_LEVEL class _Writer(object): '''Writer used to create source files with consistent", "indent(self): '''Indent the writer by one level To be used in a similiar", "something like ````def _write_html(self, file): with _Writer(file) as w: w.write('string to write') w.write(self.string_to_write)````", "start of a line if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the line", "new line if there's still more content if index < len(lines) - 1:", "is a higher level object and `write_fn(self, file)` would look something like ````def", "this class. See documentation on the write() function for further explanation. ''' self._indent_level", "Write content to the file open(path, 'w') needs to be called prior to", "documentation on the write() function for further explanation. ''' self._indent_level += 1 return", "on the write() function for further explanation. ''' if self._indent_level > 0: self._indent_level", "writer _INDENT_LEVEL = 2 _INDENT = ' ' * _INDENT_LEVEL class _Writer(object): '''Writer", "as f: self.write_fn(f)```` where `self` is a higher level object and `write_fn(self, file)`", "a higher level object and `write_fn(self, file)` would look something like ````def _write_html(self,", "on the write() function for further explanation. ''' self._indent_level += 1 return self", "Whether or not to write a newline at the end Default is True.", "to write a newline at the end Default is True. 
''' lines =", "To be used in a similiar fashion to the write() function in this", "''' if self._indent_level > 0: self._indent_level -= 1 return self def write(self, content='',", "def write(self, content='', end_in_newline=True): ''' Write content to the file open(path, 'w') needs", "be called prior to calling this function, typically by ````with open(file, 'w') as", "self._indent_level -= 1 return self def write(self, content='', end_in_newline=True): ''' Write content to", "See documentation on the write() function for further explanation. ''' if self._indent_level >", "enumerate(lines): # Indent if the start of a line if self._start_of_line: self._path.write(_INDENT *", "< len(lines) - 1: self._path.write('\\n') self._start_of_line = True # If the content should", "for writer _INDENT_LEVEL = 2 _INDENT = ' ' * _INDENT_LEVEL class _Writer(object):", "Args: content (str): Content to write, as a string Content is cleaned using", "self._indent_level = 0 self._start_of_line = True def __enter__(self): return self def __exit__(self, exception_type,", "the end Default is True. ''' lines = inspect.cleandoc(content).splitlines() for index, line in", "end Default is True. 
''' lines = inspect.cleandoc(content).splitlines() for index, line in enumerate(lines):", "for index, line in enumerate(lines): # Indent if the start of a line", "path if an uncaught exception occured while writing: if exception_type: self._path.truncate(0) def indent(self):", "the exit exception_value: Value of exception that triggered the exit traceback: Traceback when", "to write to ''' self._path = path self._indent_level = 0 self._start_of_line = True", "inspect # Indent level for writer _INDENT_LEVEL = 2 _INDENT = ' '", "= 0 self._start_of_line = True def __enter__(self): return self def __exit__(self, exception_type, exception_value,", "''' self._indent_level += 1 return self def dedent(self): '''Dedent the writer by one", "self._path.write(line) # Write a new line if there's still more content if index", "self._indent_level > 0: self._indent_level -= 1 return self def write(self, content='', end_in_newline=True): '''", "* _INDENT_LEVEL class _Writer(object): '''Writer used to create source files with consistent formatting'''", "import inspect # Indent level for writer _INDENT_LEVEL = 2 _INDENT = '", "more content if index < len(lines) - 1: self._path.write('\\n') self._start_of_line = True #", "= True # If the content should end in a newline, write it", "called prior to calling this function, typically by ````with open(file, 'w') as f:", "similiar fashion to the write() function in this class. 
See documentation on the", "and `write_fn(self, file)` would look something like ````def _write_html(self, file): with _Writer(file) as", "= inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): # Indent if the start of", "''' # Clear the path if an uncaught exception occured while writing: if", "self._indent_level) # Write the line self._path.write(line) # Write a new line if there's", "by ````with open(file, 'w') as f: self.write_fn(f)```` where `self` is a higher level", "self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the line self._path.write(line) # Write a new", "self def dedent(self): '''Dedent the writer by one level To be used in", "like ````def _write_html(self, file): with _Writer(file) as w: w.write('string to write') w.write(self.string_to_write)```` Args:", "write() function for further explanation. ''' self._indent_level += 1 return self def dedent(self):", "traceback): ''' Args: exception_type: Type of exception that triggered the exit exception_value: Value", "Clear the path if an uncaught exception occured while writing: if exception_type: self._path.truncate(0)", "an uncaught exception occured while writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent the", "higher level object and `write_fn(self, file)` would look something like ````def _write_html(self, file):", "not to write a newline at the end Default is True. 
''' lines", "the content should end in a newline, write it if end_in_newline: self._path.write('\\n') self._start_of_line", "write, as a string Content is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether", "write it if end_in_newline: self._path.write('\\n') self._start_of_line = True else: self._start_of_line = False return", "when exit was triggered ''' # Clear the path if an uncaught exception", "_Writer(object): '''Writer used to create source files with consistent formatting''' def __init__(self, path):", "exception_type: self._path.truncate(0) def indent(self): '''Indent the writer by one level To be used", "a similiar fashion to the write() function in this class. See documentation on", "occured while writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent the writer by one", "of exception that triggered the exit exception_value: Value of exception that triggered the", "in enumerate(lines): # Indent if the start of a line if self._start_of_line: self._path.write(_INDENT", "with consistent formatting''' def __init__(self, path): ''' Args: path (handle): File name and", "further explanation. ''' if self._indent_level > 0: self._indent_level -= 1 return self def", "0 self._start_of_line = True def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback):", "write() function in this class. See documentation on the write() function for further", "documentation on the write() function for further explanation. 
''' if self._indent_level > 0:", "file)` would look something like ````def _write_html(self, file): with _Writer(file) as w: w.write('string", "self._path.write('\\n') self._start_of_line = True # If the content should end in a newline,", "in a newline, write it if end_in_newline: self._path.write('\\n') self._start_of_line = True else: self._start_of_line", "end in a newline, write it if end_in_newline: self._path.write('\\n') self._start_of_line = True else:", "- 1: self._path.write('\\n') self._start_of_line = True # If the content should end in", "look something like ````def _write_html(self, file): with _Writer(file) as w: w.write('string to write')", "File name and path to write to ''' self._path = path self._indent_level =", "triggered ''' # Clear the path if an uncaught exception occured while writing:", "# Write a new line if there's still more content if index <", "writer by one level To be used in a similiar fashion to the", "# Indent level for writer _INDENT_LEVEL = 2 _INDENT = ' ' *", "the exit traceback: Traceback when exit was triggered ''' # Clear the path", "line in enumerate(lines): # Indent if the start of a line if self._start_of_line:", "exception that triggered the exit exception_value: Value of exception that triggered the exit", "the write() function in this class. See documentation on the write() function for", "to the write() function in this class. See documentation on the write() function", "self._start_of_line = True # If the content should end in a newline, write", "function for further explanation. ''' if self._indent_level > 0: self._indent_level -= 1 return", "''' self._path = path self._indent_level = 0 self._start_of_line = True def __enter__(self): return", "* self._indent_level) # Write the line self._path.write(line) # Write a new line if", "for further explanation. ''' self._indent_level += 1 return self def dedent(self): '''Dedent the", "explanation. 
''' if self._indent_level > 0: self._indent_level -= 1 return self def write(self,", "content (str): Content to write, as a string Content is cleaned using Python's", "True def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): ''' Args: exception_type:", "if exception_type: self._path.truncate(0) def indent(self): '''Indent the writer by one level To be", "def indent(self): '''Indent the writer by one level To be used in a", "self def write(self, content='', end_in_newline=True): ''' Write content to the file open(path, 'w')", "__enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): ''' Args: exception_type: Type of", "open(path, 'w') needs to be called prior to calling this function, typically by", "needs to be called prior to calling this function, typically by ````with open(file,", "to ''' self._path = path self._indent_level = 0 self._start_of_line = True def __enter__(self):", "1 return self def dedent(self): '''Dedent the writer by one level To be", "object and `write_fn(self, file)` would look something like ````def _write_html(self, file): with _Writer(file)", "write') w.write(self.string_to_write)```` Args: content (str): Content to write, as a string Content is", "w.write(self.string_to_write)```` Args: content (str): Content to write, as a string Content is cleaned", "index, line in enumerate(lines): # Indent if the start of a line if", "triggered the exit exception_value: Value of exception that triggered the exit traceback: Traceback", "''' Args: exception_type: Type of exception that triggered the exit exception_value: Value of", "0: self._indent_level -= 1 return self def write(self, content='', end_in_newline=True): ''' Write content", "# Write the line self._path.write(line) # Write a new line if there's still", "a new line if there's still more content if index < len(lines) -", "should end in a newline, write it if end_in_newline: self._path.write('\\n') 
self._start_of_line = True", "to calling this function, typically by ````with open(file, 'w') as f: self.write_fn(f)```` where", "2 _INDENT = ' ' * _INDENT_LEVEL class _Writer(object): '''Writer used to create", "if an uncaught exception occured while writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent", "Indent if the start of a line if self._start_of_line: self._path.write(_INDENT * self._indent_level) #", "source files with consistent formatting''' def __init__(self, path): ''' Args: path (handle): File", "path (handle): File name and path to write to ''' self._path = path", "w.write('string to write') w.write(self.string_to_write)```` Args: content (str): Content to write, as a string", "''' Args: path (handle): File name and path to write to ''' self._path", "lines = inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): # Indent if the start", "of a line if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the line self._path.write(line)", "create source files with consistent formatting''' def __init__(self, path): ''' Args: path (handle):", "self._indent_level += 1 return self def dedent(self): '''Dedent the writer by one level", "Write the line self._path.write(line) # Write a new line if there's still more", "Value of exception that triggered the exit traceback: Traceback when exit was triggered", "(str): Content to write, as a string Content is cleaned using Python's `inspect.cleandoc()`", "_Writer(file) as w: w.write('string to write') w.write(self.string_to_write)```` Args: content (str): Content to write,", "Indent level for writer _INDENT_LEVEL = 2 _INDENT = ' ' * _INDENT_LEVEL", "if index < len(lines) - 1: self._path.write('\\n') self._start_of_line = True # If the", "writing: if exception_type: self._path.truncate(0) def indent(self): '''Indent the writer by one level To", "def __enter__(self): return self def __exit__(self, exception_type, exception_value, traceback): ''' 
Args: exception_type: Type", "self._path.truncate(0) def indent(self): '''Indent the writer by one level To be used in", "the write() function for further explanation. ''' self._indent_level += 1 return self def", "= ' ' * _INDENT_LEVEL class _Writer(object): '''Writer used to create source files", "is True. ''' lines = inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): # Indent", "= path self._indent_level = 0 self._start_of_line = True def __enter__(self): return self def", "exit was triggered ''' # Clear the path if an uncaught exception occured", "newline at the end Default is True. ''' lines = inspect.cleandoc(content).splitlines() for index,", "line if there's still more content if index < len(lines) - 1: self._path.write('\\n')", "the file open(path, 'w') needs to be called prior to calling this function,", "in a similiar fashion to the write() function in this class. See documentation", "if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the line self._path.write(line) # Write a", "it if end_in_newline: self._path.write('\\n') self._start_of_line = True else: self._start_of_line = False return self", "exception_value: Value of exception that triggered the exit traceback: Traceback when exit was", "Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not to write a newline at the", "a newline, write it if end_in_newline: self._path.write('\\n') self._start_of_line = True else: self._start_of_line =", "__exit__(self, exception_type, exception_value, traceback): ''' Args: exception_type: Type of exception that triggered the", "open(file, 'w') as f: self.write_fn(f)```` where `self` is a higher level object and", "_write_html(self, file): with _Writer(file) as w: w.write('string to write') w.write(self.string_to_write)```` Args: content (str):", "the start of a line if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write the", "def __init__(self, path): ''' Args: path 
(handle): File name and path to write", "is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not to write a", "to write') w.write(self.string_to_write)```` Args: content (str): Content to write, as a string Content", "if the start of a line if self._start_of_line: self._path.write(_INDENT * self._indent_level) # Write", "that triggered the exit traceback: Traceback when exit was triggered ''' # Clear", "as a string Content is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or", "''' Write content to the file open(path, 'w') needs to be called prior", "self._path.write(_INDENT * self._indent_level) # Write the line self._path.write(line) # Write a new line", "to be called prior to calling this function, typically by ````with open(file, 'w')", "file): with _Writer(file) as w: w.write('string to write') w.write(self.string_to_write)```` Args: content (str): Content", "at the end Default is True. ''' lines = inspect.cleandoc(content).splitlines() for index, line", "class _Writer(object): '''Writer used to create source files with consistent formatting''' def __init__(self,", "_INDENT_LEVEL = 2 _INDENT = ' ' * _INDENT_LEVEL class _Writer(object): '''Writer used", "(handle): File name and path to write to ''' self._path = path self._indent_level", "using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not to write a newline at", "True # If the content should end in a newline, write it if", "Content to write, as a string Content is cleaned using Python's `inspect.cleandoc()` end_in_newline", "'''Dedent the writer by one level To be used in a similiar fashion", "Args: exception_type: Type of exception that triggered the exit exception_value: Value of exception", "'w') needs to be called prior to calling this function, typically by ````with", "there's still more content if index < len(lines) - 1: self._path.write('\\n') self._start_of_line =", "or not to write a newline at the end Default is True. 
'''", "# If the content should end in a newline, write it if end_in_newline:", "files with consistent formatting''' def __init__(self, path): ''' Args: path (handle): File name", "'''Indent the writer by one level To be used in a similiar fashion", "index < len(lines) - 1: self._path.write('\\n') self._start_of_line = True # If the content", "exception_value, traceback): ''' Args: exception_type: Type of exception that triggered the exit exception_value:", "(bool): Whether or not to write a newline at the end Default is", "# Indent if the start of a line if self._start_of_line: self._path.write(_INDENT * self._indent_level)", "to create source files with consistent formatting''' def __init__(self, path): ''' Args: path", "return self def dedent(self): '''Dedent the writer by one level To be used", "file open(path, 'w') needs to be called prior to calling this function, typically", "this function, typically by ````with open(file, 'w') as f: self.write_fn(f)```` where `self` is", "exception_type, exception_value, traceback): ''' Args: exception_type: Type of exception that triggered the exit", "len(lines) - 1: self._path.write('\\n') self._start_of_line = True # If the content should end", "''' lines = inspect.cleandoc(content).splitlines() for index, line in enumerate(lines): # Indent if the", "line self._path.write(line) # Write a new line if there's still more content if", "path): ''' Args: path (handle): File name and path to write to '''", "dedent(self): '''Dedent the writer by one level To be used in a similiar", "`write_fn(self, file)` would look something like ````def _write_html(self, file): with _Writer(file) as w:", "end_in_newline (bool): Whether or not to write a newline at the end Default", "1: self._path.write('\\n') self._start_of_line = True # If the content should end in a", "be used in a similiar fashion to the write() function in this class.", "1 return self def write(self, content='', end_in_newline=True): ''' Write content to the file", 
"content='', end_in_newline=True): ''' Write content to the file open(path, 'w') needs to be", "Content is cleaned using Python's `inspect.cleandoc()` end_in_newline (bool): Whether or not to write", "Write a new line if there's still more content if index < len(lines)", "function in this class. See documentation on the write() function for further explanation." ]
[ "choices def __crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i,", "= 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7, .1,", "return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count,", "__crossover(mother_data: list, father_data: list, start: int, end: int) -> list: dimension = len(mother_data)", "time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate() t2 = time.time() if", "if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO:", "rd[i] = sum([pair[0] * pair[1] for pair in pair_chunk], []) pair_chunks = numpy.array_split([[parents[i],", "1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal", "1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]]", "result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2", "if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] +=", ">= 1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2 +", "if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy()", "-= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1):", "for key in range(2 * QUEENS - 1)} self.cost = 0 for i", "q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]]", "v in father_data[end:] + 
father_data[:end]: if v not in data: if i ==", "@staticmethod def __crossover(mother_data: list, father_data: list, start: int, end: int) -> list: dimension", "def __local_search(self): self.__data = [+c for c in self.__data] def answer(self) -> Chromosome:", "not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals", "+= 1 if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2 +", "1 if new_cost <= result.cost or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1]", "= sum([pair[0] * pair[1] for pair in pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i", "parents = self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0]", "for i in range(P_COUNT)] for p in processes: p.start() for p in processes:", "= Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child,", "args=(i, pair_chunks[i], rd) ) for i in range(P_COUNT)] for p in processes: p.start()", "other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n -", "self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values())", "= result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1 -", "side2) = random.sample(range(QUEENS + 1), 2) start = min(side1, side2) end = max(side1,", "result.__data[q2]] > 1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2", "- 
result.__data[q2]] > 1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] += 1 if", "0 for key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key: 0 for", "= time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search() t4 = time.time()", "== list: self.__data = countOrData else: raise Exception() self.__data.sort() def iterate(self): t0 =", "- result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if", "new_cost <= result.cost or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals =", "v i += 1 return data def solved(self): return self.cost == 0 class", "= True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO =", "= math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count = n -", "len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count =", "{i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers:", "* parents[i + 1] for i in range(0, len(parents) - 1, 2)], []))", "for key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key: 0 for key", "if i == dimension: i = 0 data[i] = v i += 1", "if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if", "def __crossover(mother_data: list, father_data: list, start: int, end: int) -> list: dimension =", "other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data = [+c for c in", "for i in range(0, len(parents) - 1, 
2)], P_COUNT) manager = mp.Manager() rd", "[.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population = Population(N) i", "def __swap(self, count, should_be_better): global QUEENS result = Chromosome(self.__data) for _ in range(count):", "-= 1 if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2 -", "- result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if", "class Population: def __init__(self, countOrData): if type(countOrData) == int: self.__data = [Chromosome() for", "QUEENS (side1, side2) = random.sample(range(QUEENS + 1), 2) start = min(side1, side2) end", "1 def __str__(self): return self.__data.__str__() + ': ' + str(self.cost) def __lt__(self, other):", "new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]] >", "= [Chromosome() for _ in range(countOrData)] elif type(countOrData) == list: self.__data = countOrData", "t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}')", "manager = mp.Manager() rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd)", "return self.cost == 0 class Population: def __init__(self, countOrData): if type(countOrData) == int:", "{t3 - t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4", "n) roulette = roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) / n))", "second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child, second_child] def __invert__(self): return self.__swap(random.randint(0,", "return self.__data[-1] def answers(self) -> list: return list(map(lambda c: c.cost, self.__data)) t_start =", "result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1", "> other.cost def __mul__(self, other): global 
QUEENS (side1, side2) = random.sample(range(QUEENS + 1),", "- result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost += 1", "return result @staticmethod def __crossover(mother_data: list, father_data: list, start: int, end: int) ->", "{key: 0 for key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key: 0", "+ result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -= 1", "or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals =", "result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost return result @staticmethod", "answers(self) -> list: return list(map(lambda c: c.cost, self.__data)) t_start = time.time() P_COUNT =", "global QUEENS (side1, side2) = random.sample(range(QUEENS + 1), 2) start = min(side1, side2)", "Population: def __init__(self, countOrData): if type(countOrData) == int: self.__data = [Chromosome() for _", "t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}') def __choose(self):", "data self.__maindiagonals = {key: 0 for key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals", "other.cost def __mul__(self, other): global QUEENS (side1, side2) = random.sample(range(QUEENS + 1), 2)", "self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals: if", "target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i in range(P_COUNT)] for p in processes:", "0 data[i] = v i += 1 return data def solved(self): return self.cost", "(side1, side2) = random.sample(range(QUEENS + 1), 2) start = min(side1, side2) end =", "- t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}')", "in range(2 * QUEENS - 1)} self.cost = 0 for i in range(QUEENS):", "PRINT_ALL_TIME_INFO = True 
PARALLEL = False class Chromosome: def __init__(self, data=None): global QUEENS", "end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child, second_child] def __invert__(self): return", "for c in self.__data] def __replacement(self, children): n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0]", "= random.randint(0, n) roulette = roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette)", "= countOrData else: raise Exception() self.__data.sort() def iterate(self): t0 = time.time() children =", "= numpy.array_split([[parents[i], parents[i + 1]] for i in range(0, len(parents) - 1, 2)],", "LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION", "[150, 200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5", "def __mutate(self): self.__data = [~c for c in self.__data] def __replacement(self, children): n", "n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] * n)", "(new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1", "1, 2)], P_COUNT) manager = mp.Manager() rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator,", "= max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end))", "c.cost, self.__data)) t_start = time.time() P_COUNT = os.cpu_count() QUEENS = 5000 N =", "P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] =", "other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data", "self.__antidiagonals[i + self.__data[i]] += 1 
diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in", "': ' + str(self.cost) def __lt__(self, other): return self.cost > other.cost def __mul__(self,", "return [first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0],", "- result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] += 1 if", "+ self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data = [+c for c in self.__data]", "= list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal > 0): self.cost", "result.cost = new_cost return result @staticmethod def __crossover(mother_data: list, father_data: list, start: int,", "= False PRINT_ALL_TIME_INFO = True PARALLEL = False class Chromosome: def __init__(self, data=None):", "LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global QUEENS result = Chromosome(self.__data) for _", "i = end for v in father_data[end:] + father_data[:end]: if v not in", "= time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}') def __choose(self): n", "time.time() P_COUNT = os.cpu_count() QUEENS = 5000 N = 10 MUTATION_DEGREE = 1", "children): n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] *", "- best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def", "Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False)", "result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1", "in 
pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i + 1]] for i in range(0,", "(new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] += 1", "key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key: 0 for key in", "1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]]", "> 1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2 -", "roulette = sum([[i] * (i + 1) for i in range(n)], []) turning", "processes: p.join() return Population(sum(rd.values(), [])) else: return Population(sum([parents[i] * parents[i + 1] for", "result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2", "if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i", "* (i + 1) for i in range(n)], []) turning = random.randint(0, n)", "= [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i in range(P_COUNT)] for p", "self.__data.sort() def __local_search(self): self.__data = [+c for c in self.__data] def answer(self) ->", "result.cost or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals", "(i + 1) for i in range(n)], []) turning = random.randint(0, n) roulette", "i in range(n)], []) turning = random.randint(0, n) roulette = roulette[turning:] + roulette[:turning]", "if new_cost <= result.cost or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals", "+= 1 new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]] > 1):", "data def solved(self): return self.cost == 0 class Population: def 
__init__(self, countOrData): if", "if (new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] +=", "father_data[:end]: if v not in data: if i == start: i = end", "time.time() children = self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 -", "end: int) -> list: dimension = len(mother_data) data = [None] * dimension data[start:end]", "roulette = roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) / n)) choices", "other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count = n", "def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global QUEENS result", "self.__data)) t_start = time.time() P_COUNT = os.cpu_count() QUEENS = 5000 N = 10", "self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values())", "new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost +=", "= mp.Manager() rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) )", "took {t4 - t3}') def __choose(self): n = len(self.__data) roulette = sum([[i] *", "Chromosome: def __init__(self, data=None): global QUEENS if data is None: self.__data = list(range(QUEENS))", "def answers(self) -> list: return list(map(lambda c: c.cost, self.__data)) t_start = time.time() P_COUNT", "{t1 - t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 -", "self.__data = [+c for c in self.__data] def answer(self) -> Chromosome: return self.__data[-1]", ") self.__data.sort() def __local_search(self): self.__data = [+c for c in self.__data] def answer(self)", "v not in data: if i == start: i = end if i", "c: c.cost, self.__data)) 
t_start = time.time() P_COUNT = os.cpu_count() QUEENS = 5000 N", "new_antidiagonals result.cost = new_cost return result @staticmethod def __crossover(mother_data: list, father_data: list, start:", "__crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd):", "range(countOrData)] elif type(countOrData) == list: self.__data = countOrData else: raise Exception() self.__data.sort() def", "-> Chromosome: return self.__data[-1] def answers(self) -> list: return list(map(lambda c: c.cost, self.__data))", "' + str(self.cost) def __lt__(self, other): return self.cost > other.cost def __mul__(self, other):", "- 1, 2)], P_COUNT) manager = mp.Manager() rd = manager.dict() processes = [mp.Process(", "= manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i in", "as mp import numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER", "math import os import random import multiprocessing as mp import numpy import time", "__init__(self, data=None): global QUEENS if data is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else:", "n)) choices = [] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self):", "self.__antidiagonals = {key: 0 for key in range(2 * QUEENS - 1)} self.cost", "- t1}') self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}')", "math.ceil(len(roulette) / n)) choices = [] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices", "{population.answers()}\") population.iterate() if population.answer().solved(): break i += 1 print(population.answer()) t_end = time.time() if", "i in range(P_COUNT)] for p in processes: p.start() for p in processes: p.join()", "if data is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: 
self.__data = data self.__maindiagonals", "other): return self.cost > other.cost def __mul__(self, other): global QUEENS (side1, side2) =", "/ n)) choices = [] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def", "for i in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]] +=", "+ str(self.cost) def __lt__(self, other): return self.cost > other.cost def __mul__(self, other): global", "result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2", "+ result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost += 1", "result.__antidiagonals = new_antidiagonals result.cost = new_cost return result @staticmethod def __crossover(mother_data: list, father_data:", "range(count): (q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost", "== start: i = end if i == dimension: i = 0 data[i]", "new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -=", "(new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost += 1 if new_cost <= result.cost or", "pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents) if", "range(0, len(parents) - 1, 2)], P_COUNT) manager = mp.Manager() rd = manager.dict() processes", "- result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -= 1", "* n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count", "t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate() t2 =", "PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL = False class Chromosome: def __init__(self,", "data=None): global QUEENS if data is None: self.__data = list(range(QUEENS)) 
random.shuffle(self.__data) else: self.__data", "__pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global QUEENS result =", "= new_cost return result @staticmethod def __crossover(mother_data: list, father_data: list, start: int, end:", "solved(self): return self.cost == 0 class Population: def __init__(self, countOrData): if type(countOrData) ==", "result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2", "import random import multiprocessing as mp import numpy import time PRINT_SLICE_INFO = False", "- other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count)", "QUEENS - 1)} self.cost = 0 for i in range(QUEENS): self.__maindiagonals[i - self.__data[i]]", "1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]]", "1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]]", "= .5 population = Population(N) i = 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration:", "elif type(countOrData) == list: self.__data = countOrData else: raise Exception() self.__data.sort() def iterate(self):", "- best_children_count - other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n -", "new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -=", "len(roulette), math.ceil(len(roulette) / n)) choices = [] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return", "random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data = [+c", "False) def 
__pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global QUEENS", "P_COUNT = os.cpu_count() QUEENS = 5000 N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE", "print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i += 1 print(population.answer()) t_end =", "Chromosome: return self.__data[-1] def answers(self) -> list: return list(map(lambda c: c.cost, self.__data)) t_start", "in self.__data] def answer(self) -> Chromosome: return self.__data[-1] def answers(self) -> list: return", "self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] * pair[1]", "+= 1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal", "new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]] >", "c in self.__data] def answer(self) -> Chromosome: return self.__data[-1] def answers(self) -> list:", "list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data self.__maindiagonals = {key: 0 for key in", "for _ in range(count): (q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2)", "1 if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]]", "(new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] += 1", "self.__data = [Chromosome() for _ in range(countOrData)] elif type(countOrData) == list: self.__data =", "+ 1) for i in range(n)], []) turning = random.randint(0, n) roulette =", "= mother_data[start:end] i = end for v in father_data[end:] + father_data[:end]: if v", "def __lt__(self, other): return self.cost > other.cost def __mul__(self, other): global QUEENS (side1,", "math.floor(REPLACEMENT[1] * n) other_parents_count = 
math.floor(REPLACEMENT[2] * n) best_parents_count = n - best_children_count", "result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost", "* pair[1] for pair in pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i + 1]]", "1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]]", "n) other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count = n - best_children_count - other_children_count", "self.__data = [~c for c in self.__data] def __replacement(self, children): n = len(children.__data)", "1)} self.__antidiagonals = {key: 0 for key in range(2 * QUEENS - 1)}", "in processes: p.start() for p in processes: p.join() return Population(sum(rd.values(), [])) else: return", "1 new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost", "[])) def __mutate(self): self.__data = [~c for c in self.__data] def __replacement(self, children):", "2)], [])) def __mutate(self): self.__data = [~c for c in self.__data] def __replacement(self,", "end for v in father_data[end:] + father_data[:end]: if v not in data: if", "def __choose(self): n = len(self.__data) roulette = sum([[i] * (i + 1) for", "else: self.__data = data self.__maindiagonals = {key: 0 for key in range(-QUEENS, QUEENS", "in range(P_COUNT)] for p in processes: p.start() for p in processes: p.join() return", "random import multiprocessing as mp import numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO", "+ result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] += 1 if", "roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) / n)) choices = [] for pointer", "__choose(self): n = len(self.__data) roulette = sum([[i] * (i + 1) for i", "= Chromosome(self.__crossover(other.__data, self.__data, start, end)) return 
[first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE),", "Chromosome(self.__data) for _ in range(count): (q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1,", "t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children) t3 =", "PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS:", "= 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION =", "QUEENS + 1)} self.__antidiagonals = {key: 0 for key in range(2 * QUEENS", "import os import random import multiprocessing as mp import numpy import time PRINT_SLICE_INFO", "result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1", "1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]]", "print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start,", "father_data[end:] + father_data[:end]: if v not in data: if i == start: i", "result @staticmethod def __crossover(mother_data: list, father_data: list, start: int, end: int) -> list:", "True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True", "if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] * pair[1] for pair", "count, should_be_better): global QUEENS result = Chromosome(self.__data) for _ in range(count): (q1, q2)", "print(f'Replacement took {t3 - t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search", "+ 1] for i in range(0, len(parents) - 1, 2)], [])) def __mutate(self):", "- 
best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data = [+c for", "father_data: list, start: int, end: int) -> list: dimension = len(mother_data) data =", "-= 1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1 -", "in processes: p.join() return Population(sum(rd.values(), [])) else: return Population(sum([parents[i] * parents[i + 1]", "ESCAPE_PROPORTION = .5 population = Population(N) i = 0 while True: if PRINT_ITERATION_NO:", "-= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1):", "population.answer().solved(): break i += 1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The whole", "def solved(self): return self.cost == 0 class Population: def __init__(self, countOrData): if type(countOrData)", "end)) return [first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return", "end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end))", "if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\")", "+ ': ' + str(self.cost) def __lt__(self, other): return self.cost > other.cost def", "= self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate()", "new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost return result @staticmethod def __crossover(mother_data: list,", "* n) best_parents_count = n - best_children_count - other_children_count - other_parents_count self.__data =", "- 1)} self.cost = 0 for i in range(QUEENS): 
self.__maindiagonals[i - self.__data[i]] +=", "new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]] >=", "best_children_count - other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)],", "False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False", "= False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL =", "PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i += 1 print(population.answer()) t_end", "time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False", "other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child, second_child] def", "Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child, second_child]", "if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -=", "self.cost = 0 for i in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i", "1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]]", "int, end: int) -> list: dimension = len(mother_data) data = [None] * dimension", "n = len(self.__data) roulette = sum([[i] * (i + 1) for i in", "best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data = [+c for c", "-= 1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1 +", 
"import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS =", "self.cost == 0 class Population: def __init__(self, countOrData): if type(countOrData) == int: self.__data", "( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) +", "new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost +=", "self.__data = data self.__maindiagonals = {key: 0 for key in range(-QUEENS, QUEENS +", "+ result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -= 1", "diagonal in diagonals: if (diagonal > 0): self.cost += diagonal - 1 def", "rd): rd[i] = sum([pair[0] * pair[1] for pair in pair_chunk], []) pair_chunks =", "if population.answer().solved(): break i += 1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The", "len(mother_data) data = [None] * dimension data[start:end] = mother_data[start:end] i = end for", "1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3", "None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data self.__maindiagonals = {key: 0", "-= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1):", "1 if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]]", "min(side1, side2) end = max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data,", "[~c for c in self.__data] def __replacement(self, children): n = len(children.__data) best_children_count =", "in data: if i == start: i = end if i == dimension:", "= result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1 - 
result.__data[q1]] >= 1):", "+ random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] )", "False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL = False", "1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]]", "= end if i == dimension: i = 0 data[i] = v i", "random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals", "answer(self) -> Chromosome: return self.__data[-1] def answers(self) -> list: return list(map(lambda c: c.cost,", "Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i += 1 print(population.answer()) t_end = time.time()", ".5 population = Population(N) i = 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\")", "return list(map(lambda c: c.cost, self.__data)) t_start = time.time() P_COUNT = os.cpu_count() QUEENS =", "population = Population(N) i = 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if", "numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS", "= random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy()", ".1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population = Population(N) i =", "__invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self,", "list(map(lambda c: c.cost, self.__data)) t_start = time.time() P_COUNT = os.cpu_count() QUEENS = 5000", "__replacement(self, children): n = len(children.__data) 
best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1]", "for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT parents =", "0 class Population: def __init__(self, countOrData): if type(countOrData) == int: self.__data = [Chromosome()", "ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population = Population(N) i = 0 while", "True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer())", "= os.cpu_count() QUEENS = 5000 N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE =", "- other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n", "if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data,", "True) def __swap(self, count, should_be_better): global QUEENS result = Chromosome(self.__data) for _ in", "PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}') def __choose(self): n = len(self.__data) roulette", "range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key: 0 for key in range(2 *", "+= 1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for", "new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost +=", "range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals =", "PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL", "True PARALLEL = False class Chromosome: def 
__init__(self, data=None): global QUEENS if data", "QUEENS result = Chromosome(self.__data) for _ in range(count): (q1, q2) = random.sample(range(QUEENS), 2)", "not in data: if i == start: i = end if i ==", "n - best_children_count - other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n", "PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL = False class", "self.__data = countOrData else: raise Exception() self.__data.sort() def iterate(self): t0 = time.time() children", "def __init__(self, countOrData): if type(countOrData) == int: self.__data = [Chromosome() for _ in", "random.randint(0, n) roulette = roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) /", "i += 1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The whole process took", "print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if", "range(n)], []) turning = random.randint(0, n) roulette = roulette[turning:] + roulette[:turning] pointers =", "1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The whole process took {t_end -", "global QUEENS result = Chromosome(self.__data) for _ in range(count): (q1, q2) = random.sample(range(QUEENS),", "in range(0, len(parents) - 1, 2)], [])) def __mutate(self): self.__data = [~c for", "self.cost > other.cost def __mul__(self, other): global QUEENS (side1, side2) = random.sample(range(QUEENS +", "def __crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk,", "if (diagonal > 0): self.cost += diagonal - 1 def __str__(self): return self.__data.__str__()", "countOrData): if type(countOrData) == int: 
self.__data = [Chromosome() for _ in range(countOrData)] elif", "+ self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals:", "len(parents) - 1, 2)], [])) def __mutate(self): self.__data = [~c for c in", "__local_search(self): self.__data = [+c for c in self.__data] def answer(self) -> Chromosome: return", "new_cost return result @staticmethod def __crossover(mother_data: list, father_data: list, start: int, end: int)", "in father_data[end:] + father_data[:end]: if v not in data: if i == start:", "i in range(0, len(parents) - 1, 2)], [])) def __mutate(self): self.__data = [~c", "max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child", "= 0 data[i] = v i += 1 return data def solved(self): return", "def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def", "- result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost += 1", "i == start: i = end if i == dimension: i = 0", "self.__data, start, end)) return [first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def", ">= 1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2 -", "+= 1 if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost += 1 if new_cost", "= new_antidiagonals result.cost = new_cost return result @staticmethod def __crossover(mother_data: list, father_data: list,", "children = self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}')", "[] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT parents", "data[start:end] = mother_data[start:end] i = end for v in 
father_data[end:] + father_data[:end]: if", "= 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\")", "1), 2) start = min(side1, side2) end = max(side1, side2) if PRINT_SLICE_INFO: print(start,", "result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost += 1 new_maindiagonals[q2", "-= 1 new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]] > 1):", "== 0 class Population: def __init__(self, countOrData): if type(countOrData) == int: self.__data =", "pointers = range(0, len(roulette), math.ceil(len(roulette) / n)) choices = [] for pointer in", "- self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals = list(self.__maindiagonals.values()) +", "side2) if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child =", "= [150, 200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION =", "1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost", "import math import os import random import multiprocessing as mp import numpy import", "- 1, 2)], [])) def __mutate(self): self.__data = [~c for c in self.__data]", "n) best_parents_count = n - best_children_count - other_children_count - other_parents_count self.__data = (", "return choices def __crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL: def", "= time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate() t2 = time.time()", "t_start = time.time() P_COUNT = os.cpu_count() QUEENS = 5000 N = 10 MUTATION_DEGREE", "0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if", "second_child] def __invert__(self): return 
self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True)", "self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}') def", "other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count = n - best_children_count - other_children_count -", "in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents)", "= math.floor(REPLACEMENT[2] * n) best_parents_count = n - best_children_count - other_children_count - other_parents_count", "+= 1 return data def solved(self): return self.cost == 0 class Population: def", "+= 1 if new_cost <= result.cost or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2],", "= list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data self.__maindiagonals = {key: 0 for key", "key in range(2 * QUEENS - 1)} self.cost = 0 for i in", "{population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break", "= ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count)", "def __init__(self, data=None): global QUEENS if data is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data)", "def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] * pair[1] for pair in pair_chunk],", "dimension: i = 0 data[i] = v i += 1 return data def", "os.cpu_count() QUEENS = 5000 N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150,", "> 1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1 +", "= False 
PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS =", "_ in range(count): (q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost", "= 5000 N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT", "0): self.cost += diagonal - 1 def __str__(self): return self.__data.__str__() + ': '", "other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) +", "return self.cost > other.cost def __mul__(self, other): global QUEENS (side1, side2) = random.sample(range(QUEENS", "Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved():", "dimension data[start:end] = mother_data[start:end] i = end for v in father_data[end:] + father_data[:end]:", "+= 1 new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]] > 1):", "__swap(self, count, should_be_better): global QUEENS result = Chromosome(self.__data) for _ in range(count): (q1,", "* QUEENS - 1)} self.cost = 0 for i in range(QUEENS): self.__maindiagonals[i -", "PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation", "* n) other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count = n - best_children_count -", "math.floor(REPLACEMENT[2] * n) best_parents_count = n - best_children_count - other_children_count - other_parents_count self.__data", "type(countOrData) == list: self.__data = countOrData else: raise Exception() self.__data.sort() def iterate(self): t0", "if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO:", "False 
PRINT_ALL_TIME_INFO = True PARALLEL = False class Chromosome: def __init__(self, data=None): global", "PARALLEL = False class Chromosome: def __init__(self, data=None): global QUEENS if data is", "(new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]] += 1", "parents[i + 1]] for i in range(0, len(parents) - 1, 2)], P_COUNT) manager", "def __str__(self): return self.__data.__str__() + ': ' + str(self.cost) def __lt__(self, other): return", "= [None] * dimension data[start:end] = mother_data[start:end] i = end for v in", "if v not in data: if i == start: i = end if", "p in processes: p.start() for p in processes: p.join() return Population(sum(rd.values(), [])) else:", "0 for i in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]]", "1, 2)], [])) def __mutate(self): self.__data = [~c for c in self.__data] def", "= min(side1, side2) end = max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child =", "def iterate(self): t0 = time.time() children = self.__crossover() t1 = time.time() if PRINT_TIME_INFO:", "= random.sample(range(QUEENS + 1), 2) start = min(side1, side2) end = max(side1, side2)", "= [+c for c in self.__data] def answer(self) -> Chromosome: return self.__data[-1] def", "+ 1), 2) start = min(side1, side2) end = max(side1, side2) if PRINT_SLICE_INFO:", "= range(0, len(roulette), math.ceil(len(roulette) / n)) choices = [] for pointer in pointers:", "time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children) t3 = time.time() if", "i = 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer:", "list: return list(map(lambda c: c.cost, self.__data)) t_start = time.time() P_COUNT = os.cpu_count() QUEENS", "start: int, end: int) -> list: dimension = len(mother_data) data = [None] *", "took {t2 - t1}') self.__replacement(children) t3 = time.time() if 
PRINT_TIME_INFO: print(f'Replacement took {t3", "= len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count", "in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i + self.__data[i]] += 1 diagonals", "__lt__(self, other): return self.cost > other.cost def __mul__(self, other): global QUEENS (side1, side2)", "= time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children) t3 = time.time()", "children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:]", "return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global QUEENS result = Chromosome(self.__data)", "took {t1 - t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2", "return Population(sum([parents[i] * parents[i + 1] for i in range(0, len(parents) - 1,", "str(self.cost) def __lt__(self, other): return self.cost > other.cost def __mul__(self, other): global QUEENS", "if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2 - result.__data[q2]] -=", "1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]]", "P_COUNT) manager = mp.Manager() rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i],", "return Population(sum(rd.values(), [])) else: return Population(sum([parents[i] * parents[i + 1] for i in", "random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] * pair[1] for", "result = Chromosome(self.__data) for _ in range(count): (q1, q2) = random.sample(range(QUEENS), 2) if", "def answer(self) -> 
Chromosome: return self.__data[-1] def answers(self) -> list: return list(map(lambda c:", "math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2] * n)", "for c in self.__data] def answer(self) -> Chromosome: return self.__data[-1] def answers(self) ->", "new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -=", "+= diagonal - 1 def __str__(self): return self.__data.__str__() + ': ' + str(self.cost)", "sum([[i] * (i + 1) for i in range(n)], []) turning = random.randint(0,", "manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i in range(P_COUNT)]", "+= 1 if (new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost += 1 new_maindiagonals[q2 -", "1 new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost", "in diagonals: if (diagonal > 0): self.cost += diagonal - 1 def __str__(self):", "[mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i in range(P_COUNT)] for p in", "children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children) t3", "self.__data[-1] def answers(self) -> list: return list(map(lambda c: c.cost, self.__data)) t_start = time.time()", "{t2 - t1}') self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 -", "False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL = False class Chromosome: def", "+ father_data[:end]: if v not in data: if i == start: i =", "= {key: 0 for key in range(2 * QUEENS - 1)} self.cost =", "1 new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost", "type(countOrData) == int: self.__data = [Chromosome() for _ in range(countOrData)] elif type(countOrData) ==", "in range(n)], []) turning = random.randint(0, n) 
roulette = roulette[turning:] + roulette[:turning] pointers", "0 for key in range(2 * QUEENS - 1)} self.cost = 0 for", "start = min(side1, side2) end = max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child", "if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -=", "> 1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2 +", "processes: p.start() for p in processes: p.join() return Population(sum(rd.values(), [])) else: return Population(sum([parents[i]", "len(parents) - 1, 2)], P_COUNT) manager = mp.Manager() rd = manager.dict() processes =", "+= 1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The whole process took {t_end", "print(q1, q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 -", "(q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals", "for i in range(n)], []) turning = random.randint(0, n) roulette = roulette[turning:] +", "len(self.__data) roulette = sum([[i] * (i + 1) for i in range(n)], [])", "t3}') def __choose(self): n = len(self.__data) roulette = sum([[i] * (i + 1)", "print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All", "self.__data.__str__() + ': ' + str(self.cost) def __lt__(self, other): return self.cost > other.cost", "start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return [first_child, second_child] def __invert__(self):", "first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data, start, end)) return", "p.join() return 
Population(sum(rd.values(), [])) else: return Population(sum([parents[i] * parents[i + 1] for i", "q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals =", "n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2] * n) best_parents_count =", "(new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1", "<= result.cost or not should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals", "== dimension: i = 0 data[i] = v i += 1 return data", "= roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) / n)) choices =", "result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1", "self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data = [+c for c in self.__data] def", "if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO:", ">= 1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1 -", "<reponame>kmirzavaziri/ma-nqp<filename>main.py import math import os import random import multiprocessing as mp import numpy", "class Chromosome: def __init__(self, data=None): global QUEENS if data is None: self.__data =", "__init__(self, countOrData): if type(countOrData) == int: self.__data = [Chromosome() for _ in range(countOrData)]", "global QUEENS if data is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: self.__data =", "countOrData else: raise Exception() self.__data.sort() def iterate(self): t0 = time.time() children = self.__crossover()", "[+c for c in self.__data] def answer(self) -> Chromosome: return self.__data[-1] def answers(self)", "1 if (new_maindiagonals[q1 - result.__data[q2]] 
> 1): new_cost += 1 new_maindiagonals[q2 - result.__data[q1]]", "Exception() self.__data.sort() def iterate(self): t0 = time.time() children = self.__crossover() t1 = time.time()", "pair in pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i + 1]] for i in", "= v i += 1 return data def solved(self): return self.cost == 0", "best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2]", "= result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost return", "pair_chunks[i], rd) ) for i in range(P_COUNT)] for p in processes: p.start() for", "for p in processes: p.start() for p in processes: p.join() return Population(sum(rd.values(), []))", "for diagonal in diagonals: if (diagonal > 0): self.cost += diagonal - 1", "new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -=", "print(f'Local Search took {t4 - t3}') def __choose(self): n = len(self.__data) roulette =", "PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] * pair[1] for pair in", ") for i in range(P_COUNT)] for p in processes: p.start() for p in", "Search took {t4 - t3}') def __choose(self): n = len(self.__data) roulette = sum([[i]", "= end for v in father_data[end:] + father_data[:end]: if v not in data:", "200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population", "Population(sum([parents[i] * parents[i + 1] for i in range(0, len(parents) - 1, 2)],", "= Population(N) i = 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER:", "Population(N) i = 0 while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best", "for i in range(0, len(parents) - 1, 
2)], [])) def __mutate(self): self.__data =", "= 0 for i in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 self.__antidiagonals[i +", "= result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1", "self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search() t4", "- t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4 -", "+= 1 new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]] > 1):", "PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1", "print(f'Mutation took {t2 - t1}') self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement took", "took {t3 - t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local Search took", "list, start: int, end: int) -> list: dimension = len(mother_data) data = [None]", "+ roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) / n)) choices = [] for", "range(2 * QUEENS - 1)} self.cost = 0 for i in range(QUEENS): self.__maindiagonals[i", "10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7, .1, .1]", "1 if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]]", "new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1 - result.__data[q1]] >=", "-= 1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2 +", "result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost =", "Population(sum(rd.values(), [])) 
else: return Population(sum([parents[i] * parents[i + 1] for i in range(0,", "= .3 ESCAPE_PROPORTION = .5 population = Population(N) i = 0 while True:", "pair_chunk, rd): rd[i] = sum([pair[0] * pair[1] for pair in pair_chunk], []) pair_chunks", "print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i += 1", "in range(count): (q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO: print(q1, q2) new_cost =", "self.__data] def __replacement(self, children): n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count", "PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start, end)) second_child = Chromosome(self.__crossover(other.__data, self.__data,", "roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette), math.ceil(len(roulette) / n)) choices = []", "+ 1)} self.__antidiagonals = {key: 0 for key in range(2 * QUEENS -", "should_be_better: result.__data[q1], result.__data[q2] = result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost", "1] for i in range(0, len(parents) - 1, 2)], [])) def __mutate(self): self.__data", "if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i += 1 print(population.answer())", "new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if (new_antidiagonals[q2 + result.__data[q2]] >=", "2) start = min(side1, side2) end = max(side1, side2) if PRINT_SLICE_INFO: print(start, end)", "= Chromosome(self.__data) for _ in range(count): (q1, q2) = random.sample(range(QUEENS), 2) if PRINT_SLICE_INFO:", "1 if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost += 1 if new_cost <=", "for pair in pair_chunk], []) pair_chunks = 
numpy.array_split([[parents[i], parents[i + 1]] for i", "N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7,", "in range(0, len(parents) - 1, 2)], P_COUNT) manager = mp.Manager() rd = manager.dict()", "range(0, len(parents) - 1, 2)], [])) def __mutate(self): self.__data = [~c for c", "self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global QUEENS result = Chromosome(self.__data) for", "self.__data = ( children.__data[-best_children_count:] + random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)],", "- t3}') def __choose(self): n = len(self.__data) roulette = sum([[i] * (i +", "if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost += 1 if new_cost <= result.cost", "= False class Chromosome: def __init__(self, data=None): global QUEENS if data is None:", "dimension = len(mother_data) data = [None] * dimension data[start:end] = mother_data[start:end] i =", "if PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}') def __choose(self): n = len(self.__data)", "= len(self.__data) roulette = sum([[i] * (i + 1) for i in range(n)],", "PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate()", "1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]]", "for _ in range(countOrData)] elif type(countOrData) == list: self.__data = countOrData else: raise", "[]) turning = random.randint(0, n) roulette = roulette[turning:] + roulette[:turning] pointers = range(0,", "rd) ) for i in range(P_COUNT)] for p in processes: p.start() for p", "PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False 
PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO", "self.cost += diagonal - 1 def __str__(self): return self.__data.__str__() + ': ' +", "-> list: return list(map(lambda c: c.cost, self.__data)) t_start = time.time() P_COUNT = os.cpu_count()", "PRINT_ITERATION_BEST_ANSWER_DETAILS: print(population.answer()) if PRINT_ITERATION_ALL_ANSWERS: print(f\"All Answers: {population.answers()}\") population.iterate() if population.answer().solved(): break i +=", "= [~c for c in self.__data] def __replacement(self, children): n = len(children.__data) best_children_count", "MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better): global", "print(f'Crossover took {t1 - t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took", "mp.Manager() rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for", "p.start() for p in processes: p.join() return Population(sum(rd.values(), [])) else: return Population(sum([parents[i] *", "int: self.__data = [Chromosome() for _ in range(countOrData)] elif type(countOrData) == list: self.__data", "result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost += 1 if", "[first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]),", "diagonals: if (diagonal > 0): self.cost += diagonal - 1 def __str__(self): return", "random.sample(range(QUEENS + 1), 2) start = min(side1, side2) end = max(side1, side2) if", "numpy.array_split([[parents[i], parents[i + 1]] for i in range(0, len(parents) - 1, 2)], P_COUNT)", "1]] for i in range(0, len(parents) - 1, 2)], P_COUNT) manager = mp.Manager()", "time.time() if PRINT_TIME_INFO: print(f'Local Search took {t4 - t3}') def __choose(self): n =", 
"import multiprocessing as mp import numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO =", "pair[1] for pair in pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i + 1]] for", "= True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO =", "diagonal - 1 def __str__(self): return self.__data.__str__() + ': ' + str(self.cost) def", "REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population =", "iterate(self): t0 = time.time() children = self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover", "result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1 - result.__data[q1]]", "t0 = time.time() children = self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took", "-> list: dimension = len(mother_data) data = [None] * dimension data[start:end] = mother_data[start:end]", "best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self):", "QUEENS = 5000 N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200]", "time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search() t4 = time.time() if", "turning = random.randint(0, n) roulette = roulette[turning:] + roulette[:turning] pointers = range(0, len(roulette),", "mp import numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER =", "while True: if PRINT_ITERATION_NO: print(f\"Iteration: {i}\") if PRINT_ITERATION_BEST_ANSWER: print(f\"Best Answer: {population.answer().cost}\") if PRINT_ITERATION_BEST_ANSWER_DETAILS:", "should_be_better): global QUEENS result = Chromosome(self.__data) for _ in range(count): (q1, q2) =", "= sum([[i] * (i + 
1) for i in range(n)], []) turning =", "= [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population = Population(N)", "True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False", "+ 1]] for i in range(0, len(parents) - 1, 2)], P_COUNT) manager =", "print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The whole process took {t_end - t_start}')", "new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1", "- result.__data[q1]] -= 1 if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -= 1", "+ result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2 + result.__data[q1]] += 1 if", "= self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] *", "processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i in range(P_COUNT)] for", "1)} self.cost = 0 for i in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1", "list: self.__data = countOrData else: raise Exception() self.__data.sort() def iterate(self): t0 = time.time()", "__str__(self): return self.__data.__str__() + ': ' + str(self.cost) def __lt__(self, other): return self.cost", "= time.time() P_COUNT = os.cpu_count() QUEENS = 5000 N = 10 MUTATION_DEGREE =", "def __mul__(self, other): global QUEENS (side1, side2) = random.sample(range(QUEENS + 1), 2) start", "{key: 0 for key in range(2 * QUEENS - 1)} self.cost = 0", "new_maindiagonals[q2 - result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -=", "end = max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data, start,", "(diagonal > 0): self.cost += diagonal - 1 def __str__(self): return self.__data.__str__() 
+", "= new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost return result @staticmethod def __crossover(mother_data:", "result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1", "if type(countOrData) == int: self.__data = [Chromosome() for _ in range(countOrData)] elif type(countOrData)", "end if i == dimension: i = 0 data[i] = v i +=", "self.__maindiagonals = {key: 0 for key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals =", "new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]] >=", "data = [None] * dimension data[start:end] = mother_data[start:end] i = end for v", "import numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True", "c in self.__data] def __replacement(self, children): n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0] *", "in self.__data] def __replacement(self, children): n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n)", "[])) else: return Population(sum([parents[i] * parents[i + 1] for i in range(0, len(parents)", "side2) end = max(side1, side2) if PRINT_SLICE_INFO: print(start, end) first_child = Chromosome(self.__crossover(self.__data, other.__data,", "choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL:", "os import random import multiprocessing as mp import numpy import time PRINT_SLICE_INFO =", "_ in range(countOrData)] elif type(countOrData) == list: self.__data = countOrData else: raise Exception()", "= n - best_children_count - other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:] +", "in range(countOrData)] elif type(countOrData) == list: self.__data = countOrData else: raise Exception() self.__data.sort()", "= True PARALLEL = False class 
Chromosome: def __init__(self, data=None): global QUEENS if", "2)], P_COUNT) manager = mp.Manager() rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i,", "in range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key: 0 for key in range(2", "QUEENS if data is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data", "result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost", "+ result.__data[q1]] > 1): new_cost += 1 if new_cost <= result.cost or not", "choices = [] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global", "new_cost += 1 if new_cost <= result.cost or not should_be_better: result.__data[q1], result.__data[q2] =", "range(P_COUNT)] for p in processes: p.start() for p in processes: p.join() return Population(sum(rd.values(),", "list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal > 0): self.cost +=", "result.__data[q2]] -= 1 if (new_maindiagonals[q2 - result.__data[q2]] >= 1): new_cost -= 1 new_antidiagonals[q1", "PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search() t4 = time.time() if PRINT_TIME_INFO: print(f'Local", "i == dimension: i = 0 data[i] = v i += 1 return", "[]) pair_chunks = numpy.array_split([[parents[i], parents[i + 1]] for i in range(0, len(parents) -", "population.iterate() if population.answer().solved(): break i += 1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO:", "def __replacement(self, children): n = len(children.__data) best_children_count = math.floor(REPLACEMENT[0] * n) other_children_count =", "i = 0 data[i] = v i += 1 return data def solved(self):", "multiprocessing as mp import numpy import time PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True", "i in range(QUEENS): self.__maindiagonals[i - self.__data[i]] += 1 
self.__antidiagonals[i + self.__data[i]] += 1", "p in processes: p.join() return Population(sum(rd.values(), [])) else: return Population(sum([parents[i] * parents[i +", "other): global QUEENS (side1, side2) = random.sample(range(QUEENS + 1), 2) start = min(side1,", "= time.time() children = self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1", "new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]] > 1): new_cost +=", "2) if PRINT_SLICE_INFO: print(q1, q2) new_cost = result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals =", "for v in father_data[end:] + father_data[:end]: if v not in data: if i", "sum([pair[0] * pair[1] for pair in pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i +", "= math.floor(REPLACEMENT[0] * n) other_children_count = math.floor(REPLACEMENT[1] * n) other_parents_count = math.floor(REPLACEMENT[2] *", "= data self.__maindiagonals = {key: 0 for key in range(-QUEENS, QUEENS + 1)}", "if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] +=", "self.__crossover() t1 = time.time() if PRINT_TIME_INFO: print(f'Crossover took {t1 - t0}') children.__mutate() t2", "result.__data[q1]] > 1): new_cost += 1 if new_cost <= result.cost or not should_be_better:", "- 1 def __str__(self): return self.__data.__str__() + ': ' + str(self.cost) def __lt__(self,", ".3 ESCAPE_PROPORTION = .5 population = Population(N) i = 0 while True: if", "else: return Population(sum([parents[i] * parents[i + 1] for i in range(0, len(parents) -", "list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal > 0): self.cost += diagonal -", "best_parents_count = n - best_children_count - other_children_count - other_parents_count self.__data = ( children.__data[-best_children_count:]", "i = end if i == dimension: i = 0 data[i] = v", "self.__data] def answer(self) -> Chromosome: return self.__data[-1] 
def answers(self) -> list: return list(map(lambda", "self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self): return self.__swap(random.randint(LOCAL_SEARCH_DEGREE[0], LOCAL_SEARCH_DEGREE[1]), True) def __swap(self, count, should_be_better):", "is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data self.__maindiagonals = {key:", "result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost += 1 new_antidiagonals[q2", "result.cost new_maindiagonals = result.__maindiagonals.copy() new_antidiagonals = result.__antidiagonals.copy() new_maindiagonals[q1 - result.__data[q1]] -= 1 if", "data: if i == start: i = end if i == dimension: i", "diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal > 0):", "random.sample(children.__data[:(n - best_children_count)], other_children_count) + random.sample(self.__data[:(n - best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort()", "[Chromosome() for _ in range(countOrData)] elif type(countOrData) == list: self.__data = countOrData else:", "PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement", "data is None: self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data self.__maindiagonals =", "start: i = end if i == dimension: i = 0 data[i] =", "for p in processes: p.join() return Population(sum(rd.values(), [])) else: return Population(sum([parents[i] * parents[i", "if (new_antidiagonals[q2 + result.__data[q2]] >= 1): new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] +=", "1 return data def solved(self): return self.cost == 0 class Population: def __init__(self,", "t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search() t4 =", "* dimension data[start:end] = mother_data[start:end] i = 
end for v in father_data[end:] +", "1 new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]] > 1): new_cost", "self.__data.sort() def iterate(self): t0 = time.time() children = self.__crossover() t1 = time.time() if", "data[i] = v i += 1 return data def solved(self): return self.cost ==", ".1] ESCAPE_THRESHOLD_PROPORTION = .3 ESCAPE_PROPORTION = .5 population = Population(N) i = 0", "pair_chunk], []) pair_chunks = numpy.array_split([[parents[i], parents[i + 1]] for i in range(0, len(parents)", "start, end)) return [first_child, second_child] def __invert__(self): return self.__swap(random.randint(0, MUTATION_DEGREE), False) def __pos__(self):", "+ result.__data[q1]] += 1 if (new_antidiagonals[q2 + result.__data[q1]] > 1): new_cost += 1", "list: dimension = len(mother_data) data = [None] * dimension data[start:end] = mother_data[start:end] i", "+ list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal > 0): self.cost += diagonal", "__mutate(self): self.__data = [~c for c in self.__data] def __replacement(self, children): n =", "1 new_maindiagonals[q2 - result.__data[q1]] += 1 if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost", "i += 1 return data def solved(self): return self.cost == 0 class Population:", "raise Exception() self.__data.sort() def iterate(self): t0 = time.time() children = self.__crossover() t1 =", "result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost return result @staticmethod def", "pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT parents = self.__choose()", "break i += 1 print(population.answer()) t_end = time.time() if PRINT_ALL_TIME_INFO: print(f'The whole process", "range(0, len(roulette), math.ceil(len(roulette) / n)) choices = [] for pointer in pointers: choices.append(self.__data[roulette[pointer]])", "+ random.sample(self.__data[:(n - 
best_parents_count)], other_parents_count) + self.__data[-best_parents_count:] ) self.__data.sort() def __local_search(self): self.__data =", "if i == start: i = end if i == dimension: i =", "pair_chunk_calculator(i, pair_chunk, rd): rd[i] = sum([pair[0] * pair[1] for pair in pair_chunk], [])", "new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] += 1 if (new_antidiagonals[q1 + result.__data[q2]] >", "1 diagonals = list(self.__maindiagonals.values()) + list(self.__antidiagonals.values()) for diagonal in diagonals: if (diagonal >", "5000 N = 10 MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT =", "return self.__data.__str__() + ': ' + str(self.cost) def __lt__(self, other): return self.cost >", ">= 1): new_cost -= 1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1 +", "= False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO = True PARALLEL = False class Chromosome:", "result.__data[q2], result.__data[q1] result.__maindiagonals = new_maindiagonals result.__antidiagonals = new_antidiagonals result.cost = new_cost return result", "parents[i + 1] for i in range(0, len(parents) - 1, 2)], [])) def", "(new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1", "MUTATION_DEGREE = 1 LOCAL_SEARCH_DEGREE = [150, 200] REPLACEMENT = [.7, .1, .1] ESCAPE_THRESHOLD_PROPORTION", "1): new_cost += 1 if new_cost <= result.cost or not should_be_better: result.__data[q1], result.__data[q2]", "= len(mother_data) data = [None] * dimension data[start:end] = mother_data[start:end] i = end", "PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False PRINT_ITERATION_ALL_ANSWERS = False PRINT_TIME_INFO = False PRINT_ALL_TIME_INFO", "else: raise Exception() self.__data.sort() def iterate(self): t0 = time.time() children = self.__crossover() t1", "PRINT_SLICE_INFO = False PRINT_ITERATION_NO = True PRINT_ITERATION_BEST_ANSWER = True PRINT_ITERATION_BEST_ANSWER_DETAILS = False 
PRINT_ITERATION_ALL_ANSWERS", "False class Chromosome: def __init__(self, data=None): global QUEENS if data is None: self.__data", "= [] for pointer in pointers: choices.append(self.__data[roulette[pointer]]) return choices def __crossover(self): global P_COUNT", "self.__data = list(range(QUEENS)) random.shuffle(self.__data) else: self.__data = data self.__maindiagonals = {key: 0 for", "+= 1 if (new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1 +", "> 0): self.cost += diagonal - 1 def __str__(self): return self.__data.__str__() + ':", "result.__data[q1]] -= 1 if (new_maindiagonals[q1 - result.__data[q1]] >= 1): new_cost -= 1 new_maindiagonals[q2", "> 1): new_cost += 1 if new_cost <= result.cost or not should_be_better: result.__data[q1],", "global P_COUNT parents = self.__choose() random.shuffle(parents) if PARALLEL: def pair_chunk_calculator(i, pair_chunk, rd): rd[i]", "new_cost -= 1 new_maindiagonals[q1 - result.__data[q2]] += 1 if (new_maindiagonals[q1 - result.__data[q2]] >", "random.shuffle(self.__data) else: self.__data = data self.__maindiagonals = {key: 0 for key in range(-QUEENS,", "= {key: 0 for key in range(-QUEENS, QUEENS + 1)} self.__antidiagonals = {key:", "mother_data[start:end] i = end for v in father_data[end:] + father_data[:end]: if v not", "1) for i in range(n)], []) turning = random.randint(0, n) roulette = roulette[turning:]", "return data def solved(self): return self.cost == 0 class Population: def __init__(self, countOrData):", "pair_chunks = numpy.array_split([[parents[i], parents[i + 1]] for i in range(0, len(parents) - 1,", "int) -> list: dimension = len(mother_data) data = [None] * dimension data[start:end] =", "[None] * dimension data[start:end] = mother_data[start:end] i = end for v in father_data[end:]", "__mul__(self, other): global QUEENS (side1, side2) = random.sample(range(QUEENS + 1), 2) start =", "+ result.__data[q1]] >= 1): new_cost -= 1 new_antidiagonals[q2 + result.__data[q2]] -= 1 if", 
"t0}') children.__mutate() t2 = time.time() if PRINT_TIME_INFO: print(f'Mutation took {t2 - t1}') self.__replacement(children)", "t1}') self.__replacement(children) t3 = time.time() if PRINT_TIME_INFO: print(f'Replacement took {t3 - t2}') self.__local_search()", "rd = manager.dict() processes = [mp.Process( target=pair_chunk_calculator, args=(i, pair_chunks[i], rd) ) for i", "1 new_antidiagonals[q1 + result.__data[q1]] -= 1 if (new_antidiagonals[q1 + result.__data[q1]] >= 1): new_cost", "(new_maindiagonals[q2 - result.__data[q1]] > 1): new_cost += 1 new_antidiagonals[q1 + result.__data[q2]] += 1", "{t4 - t3}') def __choose(self): n = len(self.__data) roulette = sum([[i] * (i", "i in range(0, len(parents) - 1, 2)], P_COUNT) manager = mp.Manager() rd =", "list, father_data: list, start: int, end: int) -> list: dimension = len(mother_data) data", "== int: self.__data = [Chromosome() for _ in range(countOrData)] elif type(countOrData) == list:" ]
[ "self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf')", "torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False): '''Estimate", "def cov(m, rowvar=False): '''Estimate a covariance matrix given data. Covariance indicates the level", "self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ =", "The element `C_{ii}` is the variance of `x_i`. Args: m: A 1-D or", "cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX", "than 2 dimensions') if m.dim() < 2: m = m.view(1, -1) if not", "data[~c,:] Y = data[c,:] tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y)", "forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor'", ":] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return", "= torch.mean(m, dim=1, keepdim=True) m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self,", "Y = data[c,:] tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x", "+ self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or", "__init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y)", "= Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y =", "J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y = 
X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach()", "or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking me... print(test_statistic) print(x_bar) print(y_bar) print(inv_z)", "return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY =", "forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else:", "calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X =", "test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking me... print(test_statistic) print(x_bar)", "nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X,", "self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def", "def forward(self,data,c,debug_xi = None): X = data[~c, :] Y = data[c, :] target", "n_x = X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x", "__init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling = False self.kernel_type", "if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X,", "k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled = 1.", "self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X = data[~c, :] Y", "observations. 
Each row of `m` represents a variable, and each column a single", "column represents a variable, while the rows contain observations. Returns: The covariance matrix", "torch from torch import nn from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss", "= torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def", "the variance of `x_i`. Args: m: A 1-D or 2-D array containing multiple", "cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x) idy", "matrix element `C_{ij}` is the covariance of `x_i` and `x_j`. The element `C_{ii}`", "torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:]", "setup?!??!?!?! else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y,", "self.calculate_hotelling(Y) pooled = 1. / (n_x + n_y - 2.) * (cov_X +", "y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self,", "variable, while the rows contain observations. Returns: The covariance matrix of the variables.", "test_statistic!=test_statistic: #The lengthscale be fucking me... print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X)", "import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\" @staticmethod def", "= False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes it... self.hotelling = True", "m: A 1-D or 2-D array containing multiple variables and observations. 
Each row", "n_x = X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear':", "pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__()", "Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate()", "= self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T)", "is the covariance of `x_i` and `x_j`. The element `C_{ii}` is the variance", "kY = self.calculate_hotelling(Y) pooled = 1. / (n_x + n_y - 2.) *", "= ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1", "1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear':", "level to which two variables vary together. If we examine N-dimensional samples, `X", "if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking me... print(test_statistic)", "= Kernel() def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0]))", "rowvar=False): '''Estimate a covariance matrix given data. 
Covariance indicates the level to which", "kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(),", "asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return", "= True self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X", "0])) @staticmethod def cov(m, rowvar=False): '''Estimate a covariance matrix given data. Covariance indicates", "data. Covariance indicates the level to which two variables vary together. If we", "= RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif", "return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:] tmp_dev = X.device", "= self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! else: _tmp = torch.tensor(0)", "torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! else: _tmp", "Each row of `m` represents a variable, and each column a single observation", "+ self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y / (n_x + n_y) * torch.sum(z", "2 dimensions') if m.dim() < 2: m = m.view(1, -1) if not rowvar", "= nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! 
else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX =", "If `rowvar` is True, then each row represents a variable, with observations in", "get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y)", "rowvar: If `rowvar` is True, then each row represents a variable, with observations", "X = data[~c,:] Y = data[c,:] tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y", "self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0] idx =", "= kX - x_bar cov_X = k_X.t() @ k_X return cov_X, x_bar, k_X,", "element `C_{ij}` is the covariance of `x_i` and `x_j`. The element `C_{ii}` is", "Args: m: A 1-D or 2-D array containing multiple variables and observations. Each", "self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base =", "and each column a single observation of all those variables. rowvar: If `rowvar`", "def backward(ctx, grad_output): x, = ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp()", "def forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d))", "self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X = kX - x_bar cov_X =", "tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y", "y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled = 1. 
/ (n_x + n_y -", "`rowvar` is True, then each row represents a variable, with observations in the", "ValueError('m has more than 2 dimensions') if m.dim() < 2: m = m.view(1,", "= X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x =", "else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X", "if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0] T", "ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1 +", ") @staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor'", ":] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class", "X = data[~c, :] Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data =", "examine N-dimensional samples, `X = [x_1, x_2, ... x_N]^T`, then the covariance matrix", "m = m.t() # m = m.type(torch.double) # uncomment this line if desired", "-1) if not rowvar and m.size(0) != 1: m = m.t() # m", "m.type(torch.double) # uncomment this line if desired m_mean = torch.mean(m, dim=1, keepdim=True) m", "then each row represents a variable, with observations in the columns. Otherwise, the", "m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X)", "`x_j`. The element `C_{ii}` is the variance of `x_i`. 
Args: m: A 1-D", "from torch import nn from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class", "''' if m.dim() > 2: raise ValueError('m has more than 2 dimensions') if", "the level to which two variables vary together. If we examine N-dimensional samples,", "if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors y", "The covariance matrix of the variables. ''' if m.dim() > 2: raise ValueError('m", "a variable, with observations in the columns. Otherwise, the relationship is transposed: each", "round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:],", "ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def", "if not rowvar and m.size(0) != 1: m = m.t() # m =", "x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors y =", "= 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() +", "the columns. Otherwise, the relationship is transposed: each column represents a variable, while", "requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X):", "samples, `X = [x_1, x_2, ... x_N]^T`, then the covariance matrix element `C_{ij}`", "torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data ==0", "fixes it... self.hotelling = True self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2) else:", "self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel()", "we examine N-dimensional samples, `X = [x_1, x_2, ... 
x_N]^T`, then the covariance", "Otherwise, the relationship is transposed: each column represents a variable, while the rows", "= self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling:", "(n_x + n_y - 2.) * (cov_X + cov_Y) z = torch.unsqueeze(x_bar -", "covariance of `x_i` and `x_j`. The element `C_{ii}` is the variance of `x_i`.", "`x_i` and `x_j`. The element `C_{ii}` is the variance of `x_i`. Args: m:", "self.f = Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d):", "self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with", "nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel() def", "> 2: raise ValueError('m has more than 2 dimensions') if m.dim() < 2:", "kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled = 1. /", "if m.dim() < 2: m = m.view(1, -1) if not rowvar and m.size(0)", "kernel_type if kernel_type=='hotelling': #Regularization fixes it... 
self.hotelling = True self.coeff = min(min(test_nx, test_ny)", "calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y", "- m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def", ":] Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred =", "from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\"", "self.hotelling = True self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf':", "Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old", "self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX", "grad_output): x, = ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output", "kX - x_bar cov_X = k_X.t() @ k_X return cov_X, x_bar, k_X, kX", "X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY", "class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi =", "self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X = data[~c, :] Y =", "m.dim() > 2: raise ValueError('m has more than 2 dimensions') if m.dim() <", "m = m.view(1, -1) if not rowvar and m.size(0) != 1: m =", "x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1 + y) class stableBCEwithlogits(_Loss): def __init__(self,", "m.size(0) != 1: m = m.t() # m = m.type(torch.double) # 
uncomment this", "relationship is transposed: each column represents a variable, while the rows contain observations.", "= MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5)", "self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X,", "self).__init__() print(ls) self.ratio = J self.hotelling = False self.kernel_type = kernel_type if kernel_type=='hotelling':", "element `C_{ii}` is the variance of `x_i`. Args: m: A 1-D or 2-D", "x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled =", "forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective", "data[~c, :] Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred", "torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use", "torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False): '''Estimate a covariance matrix given data.", "variables vary together. 
If we examine N-dimensional samples, `X = [x_1, x_2, ...", "T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y,", "stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x, y):", "super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module):", "return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x", "be fucking me... print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY)", "self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not", "self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False)", "variable, and each column a single observation of all those variables. rowvar: If", "grad_output / (1 + y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f", "X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X,", "m.view(1, -1) if not rowvar and m.size(0) != 1: m = m.t() #", "@staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else", "'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling = False self.kernel_type = kernel_type", "together. If we examine N-dimensional samples, `X = [x_1, x_2, ... 
x_N]^T`, then", "torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi", "self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! else: _tmp = torch.tensor(0) sig=0", "of `x_i`. Args: m: A 1-D or 2-D array containing multiple variables and", "variables. rowvar: If `rowvar` is True, then each row represents a variable, with", "= n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic:", "cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar,", "(1 + y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply", "J self.hotelling = False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes it... self.hotelling", "kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X,", "else (-x).exp() return grad_output / (1 + y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'):", "n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X,", "= [x_1, x_2, ... 
x_N]^T`, then the covariance matrix element `C_{ij}` is the", "T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0)", "= torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) #", "@staticmethod def cov(m, rowvar=False): '''Estimate a covariance matrix given data. Covariance indicates the", "_tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else:", "forward(self,data,c,debug_xi = None): X = data[~c, :] Y = data[c, :] target =", "nn from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of", "not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale", "self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X)", "< 2: m = m.view(1, -1) if not rowvar and m.size(0) != 1:", "= Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X", "= Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark,", "column a single observation of all those variables. rowvar: If `rowvar` is True,", "rows contain observations. Returns: The covariance matrix of the variables. ''' if m.dim()", "data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type =", "variance of `x_i`. 
Args: m: A 1-D or 2-D array containing multiple variables", "* cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic", "= X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp", "None): X = data[~c, :] Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data", "self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n,", "cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0]", "round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y =", "MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base", "# m = m.type(torch.double) # uncomment this line if desired m_mean = torch.mean(m,", "not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0] T =", "return grad_output / (1 + y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction)", "transposed: each column represents a variable, while the rows contain observations. 
Returns: The", "k_X = kX - x_bar cov_X = k_X.t() @ k_X return cov_X, x_bar,", "torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type", "m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x =", "min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale =", "y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits()", "= X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate()", "... x_N]^T`, then the covariance matrix element `C_{ij}` is the covariance of `x_i`", "observation of all those variables. rowvar: If `rowvar` is True, then each row", "= self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False): '''Estimate a covariance", "= self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_", "a single observation of all those variables. 
rowvar: If `rowvar` is True, then", "`m` represents a variable, and each column a single observation of all those", "X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.)", "gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦", "return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0)", "print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled)", "= self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X = kX - x_bar cov_X", "T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with", "`C_{ij}` is the covariance of `x_i` and `x_j`. The element `C_{ii}` is the", "/ (n_x + n_y) * torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X", "+ exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y = exp.log1p()", "* n_y / (n_x + n_y) * torch.sum(z * inv_z) return test_statistic def", "X, T): kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X = kX", "self.ratio = J self.hotelling = False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes", "kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X = kX - x_bar", "fucking me... 
print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max())", "def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def", "dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?!", "+ n_y) * torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:]", "self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X", "(cov_X + cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() +", "two variables vary together. If we examine N-dimensional samples, `X = [x_1, x_2,", "covariance matrix element `C_{ij}` is the covariance of `x_i` and `x_j`. 
The element", "= m.type(torch.double) # uncomment this line if desired m_mean = torch.mean(m, dim=1, keepdim=True)", "x_bar = torch.mean(kX, dim=0) k_X = kX - x_bar cov_X = k_X.t() @", "Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze()", "n_y) * torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y", "line if desired m_mean = torch.mean(m, dim=1, keepdim=True) m = m - m_mean", "print(ls) self.ratio = J self.hotelling = False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization", "d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False): '''Estimate a", "x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod", "x_2, ... x_N]^T`, then the covariance matrix element `C_{ij}` is the covariance of", "T): kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X = kX -", "m = m.type(torch.double) # uncomment this line if desired m_mean = torch.mean(m, dim=1,", "m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0", "test_statistic = n_x * n_y / (n_x + n_y) * torch.sum(z * inv_z)", "_tmp = 0 n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY", "get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m,", "given data. Covariance indicates the level to which two variables vary together. If", "print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T):", "2-D array containing multiple variables and observations. 
Each row of `m` represents a", "#Regularization fixes it... self.hotelling = True self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2)", "#The lengthscale be fucking me... print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y)", "else: _tmp = 0 n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X)", "T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x = X.shape[0] n_y", "backward(ctx, grad_output): x, = ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return", "= torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y", "cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x +", "y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx,", "those variables. rowvar: If `rowvar` is True, then each row represents a variable,", "of all those variables. rowvar: If `rowvar` is True, then each row represents", "torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp =", "cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled = 1. / (n_x + n_y", "single observation of all those variables. rowvar: If `rowvar` is True, then each", "and m.size(0) != 1: m = m.t() # m = m.type(torch.double) # uncomment", "cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.)", "= J self.hotelling = False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes it...", "2.) * (cov_X + cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ =", "old setup?!??!?!?! 
else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY =", "cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x = X.shape[0] n_y =", "= self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z", "return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x)", "self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y / (n_x + n_y) * torch.sum(z *", "represents a variable, while the rows contain observations. Returns: The covariance matrix of", "dim=0) k_X = kX - x_bar cov_X = k_X.t() @ k_X return cov_X,", "X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y =", "def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y):", "2: raise ValueError('m has more than 2 dimensions') if m.dim() < 2: m", "n_x * n_y / (n_x + n_y) * torch.sum(z * inv_z) return test_statistic", "self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X = data[~c, :]", "asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif", "Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio)", "(data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio", "uncomment this line if desired m_mean = torch.mean(m, dim=1, keepdim=True) m = m", "= data[c, :] target = 
torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return", "X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp =", "forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:] tmp_dev = X.device if not self.hotelling:", "all those variables. rowvar: If `rowvar` is True, then each row represents a", "+ n_y - 2.) * (cov_X + cov_Y) z = torch.unsqueeze(x_bar - y_bar,", "@staticmethod def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half()", "elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny)", "= 0 n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY =", "import torch from torch import nn from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import", "1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y / (n_x", "+ n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate()", "data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target)", "(n_x + n_y) * torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X =", "1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d", "print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self,", "cov(m, 
rowvar=False): '''Estimate a covariance matrix given data. Covariance indicates the level to", "+ y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def", "a variable, and each column a single observation of all those variables. rowvar:", "= data[~c,:] Y = data[c,:] tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y =", "print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X,", "nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5)", "cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled", "super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling = False self.kernel_type = kernel_type if", "the variables. ''' if m.dim() > 2: raise ValueError('m has more than 2", "def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if", "of x ↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp = x.exp()", "torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking me...", "* (cov_X + cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float()", "and `x_j`. The element `C_{ii}` is the variance of `x_i`. Args: m: A", "= min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale", "N-dimensional samples, `X = [x_1, x_2, ... 
x_N]^T`, then the covariance matrix element", "= self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x = X.shape[0] n_y = Y.shape[0]", "+ n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale", "print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic", "test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:] tmp_dev = X.device if", "X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio)", "class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x,", "idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y = X[idx[:J_x],", "which two variables vary together. If we examine N-dimensional samples, `X = [x_1,", "= x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y )", "self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY", "RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern':", "row represents a variable, with observations in the columns. 
Otherwise, the relationship is", "m_mean = torch.mean(m, dim=1, keepdim=True) m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def", "_tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False)", "a covariance matrix given data. Covariance indicates the level to which two variables", "is True, then each row represents a variable, with observations in the columns.", "return test_statistic def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX,", "= nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X =", "each column represents a variable, while the rows contain observations. Returns: The covariance", "to which two variables vary together. If we examine N-dimensional samples, `X =", "[x_1, x_2, ... x_N]^T`, then the covariance matrix element `C_{ij}` is the covariance", "dimensions') if m.dim() < 2: m = m.view(1, -1) if not rowvar and", "Use old setup?!??!?!?! else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY", "observations. Returns: The covariance matrix of the variables. ''' if m.dim() > 2:", "matrix of the variables. 
''' if m.dim() > 2: raise ValueError('m has more", "= torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data", "min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad(): d", "raise ValueError('m has more than 2 dimensions') if m.dim() < 2: m =", "self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx,", "torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y / (n_x + n_y) *", "keepdim=True) m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar =", "x_N]^T`, then the covariance matrix element `C_{ij}` is the covariance of `x_i` and", "0 n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y)", "def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X", "covariance matrix of the variables. ''' if m.dim() > 2: raise ValueError('m has", "the covariance matrix element `C_{ij}` is the covariance of `x_i` and `x_j`. The", "y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y /", ":].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX =", "Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX", "is the variance of `x_i`. 
Args: m: A 1-D or 2-D array containing", "X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def", "me... print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig)", "T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def", "print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x +", "requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale", "and observations. Each row of `m` represents a variable, and each column a", "k_Y, kY = self.calculate_hotelling(Y) pooled = 1. / (n_x + n_y - 2.)", "m.dim() < 2: m = m.view(1, -1) if not rowvar and m.size(0) !=", "class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling", "the rows contain observations. Returns: The covariance matrix of the variables. 
''' if", "-self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J", "self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d >", "def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling = False", "x ↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x)", "Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp", "the covariance of `x_i` and `x_j`. The element `C_{ii}` is the variance of", "MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling =", "get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if", "> 0])) @staticmethod def cov(m, rowvar=False): '''Estimate a covariance matrix given data. Covariance", "= round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :],", "= self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y,", "super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X = data[~c,", "contain observations. Returns: The covariance matrix of the variables. ''' if m.dim() >", "==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking me... 
print(test_statistic) print(x_bar) print(y_bar)", "test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or", "import nn from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation", "if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X =", "with observations in the columns. Otherwise, the relationship is transposed: each column represents", "more than 2 dimensions') if m.dim() < 2: m = m.view(1, -1) if", "of `m` represents a variable, and each column a single observation of all", "vary together. If we examine N-dimensional samples, `X = [x_1, x_2, ... x_N]^T`,", "of the variables. ''' if m.dim() > 2: raise ValueError('m has more than", "matrix given data. Covariance indicates the level to which two variables vary together.", "x): exp = x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else", "y ) @staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors y = (-x).exp().half() if", "if kernel_type=='hotelling': #Regularization fixes it... self.hotelling = True self.coeff = min(min(test_nx, test_ny) **", "print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def", "= None): X = data[~c, :] Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device)", "torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y],", "is transposed: each column represents a variable, while the rows contain observations. 
Returns:", "= n_x * n_y / (n_x + n_y) * torch.sum(z * inv_z) return", "= X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y =", "if desired m_mean = torch.mean(m, dim=1, keepdim=True) m = m - m_mean return", "T).evaluate() x_bar = torch.mean(kX, dim=0) k_X = kX - x_bar cov_X = k_X.t()", "= LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff", "= torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y / (n_x + n_y)", "sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp = 0", "True, then each row represents a variable, with observations in the columns. Otherwise,", "cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x", "stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X = data[~c, :] Y = data[c, :]", "not rowvar and m.size(0) != 1: m = m.t() # m = m.type(torch.double)", "** asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X)", "= self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX =", "self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! 
else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX", "return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def", "(-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1 + y) class stableBCEwithlogits(_Loss):", "= torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad():", "the relationship is transposed: each column represents a variable, while the rows contain", "if not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp)", "with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! else:", "sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale = nn.Parameter(sig.unsqueeze(-1).to(tmp_dev),requires_grad=False) # Use old setup?!??!?!?! else: _tmp =", "self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z =", "with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False):", "!= 1: m = m.t() # m = m.type(torch.double) # uncomment this line", "= self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if", "self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY", "cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic =", "each row represents a variable, with observations in the columns. 
Otherwise, the relationship", "= torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(),", "= min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad():", "T) else: _tmp = 0 n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX =", "a variable, while the rows contain observations. Returns: The covariance matrix of the", "test_statistic def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0)", "= torch.mean(kX, dim=0) k_X = kX - x_bar cov_X = k_X.t() @ k_X", "# uncomment this line if desired m_mean = torch.mean(m, dim=1, keepdim=True) m =", "def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0]", ":], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T): kX = self.kernel_X(X, T).evaluate() kY =", "inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if", "= stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X = data[~c, :] Y = data[c,", "self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x = X.shape[0]", "T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX =", "kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y,", "from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x", "/ (n_x + n_y - 2.) 
* (cov_X + cov_Y) z = torch.unsqueeze(x_bar", "n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The", "torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig", "= (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls)", "def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0] idx = torch.randperm(n_x) idy =", "columns. Otherwise, the relationship is transposed: each column represents a variable, while the", "LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff =", "then the covariance matrix element `C_{ij}` is the covariance of `x_i` and `x_j`.", "y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1 + y)", "cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1)", "self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0] T = torch.cat([T_x,", "If we examine N-dimensional samples, `X = [x_1, x_2, ... 
x_N]^T`, then the", "= torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x", "data[c,:] tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0]", "= X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled =", "self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes it... self.hotelling = True self.coeff =", "return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio =", "* inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:] tmp_dev", "represents a variable, and each column a single observation of all those variables.", "print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y))", "= data[c,:] tmp_dev = X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x =", "reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y) class", "self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var)", "or test_statistic!=test_statistic: #The lengthscale be fucking me... 
print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y)", "J_y = round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:],", "= X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y", "= exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx, grad_output):", "1. / (n_x + n_y - 2.) * (cov_X + cov_Y) z =", "torch import nn from gpytorch.kernels import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function):", "1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev))", "rowvar and m.size(0) != 1: m = m.t() # m = m.type(torch.double) #", "= self.calculate_hotelling(Y) pooled = 1. / (n_x + n_y - 2.) * (cov_X", "self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled = 1. / (n_x +", "A 1-D or 2-D array containing multiple variables and observations. Each row of", "= Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X, Y],", "pooled = 1./(n_x+n_y-2.) * cov_X + cov_Y*1./(n_x+n_y-2.) z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float()", "m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y):", "# Use old setup?!??!?!?! 
else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T)", "n_y / (n_x + n_y) * torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None):", "= m.t() # m = m.type(torch.double) # uncomment this line if desired m_mean", "n_y = Y.shape[0] idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y", "class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x):", "pooled = 1. / (n_x + n_y - 2.) * (cov_X + cov_Y)", "array containing multiple variables and observations. Each row of `m` represents a variable,", "return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx, grad_output): x, =", "= nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel()", "Returns: The covariance matrix of the variables. ''' if m.dim() > 2: raise", "containing multiple variables and observations. Each row of `m` represents a variable, and", "n_y) * torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be", "= round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y", "= self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp = 0 n_x =", "import LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1", "variables and observations. Each row of `m` represents a variable, and each column", "exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y = exp.log1p() return", "= 1. / (n_x + n_y - 2.) * (cov_X + cov_Y) z", "+ cov_Y*1./(n_x+n_y-2.) 
z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x", "multiple variables and observations. Each row of `m` represents a variable, and each", "z = torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y)", "= self.cov(X) return cov_X,x_bar,0,0 def get_sample_witness(self,X,Y): n_x = X.shape[0] n_y = Y.shape[0] idx", "cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic", "else y ) @staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors y = (-x).exp().half()", "observations in the columns. Otherwise, the relationship is transposed: each column represents a", "it... self.hotelling = True self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if", "2: m = m.view(1, -1) if not rowvar and m.size(0) != 1: m", "idx = torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x,", "inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y / (n_x +", "`x_i`. 
Args: m: A 1-D or 2-D array containing multiple variables and observations.", "torch.mean(kX, dim=0) k_X = kX - x_bar cov_X = k_X.t() @ k_X return", "= torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3):", "+ cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device))", "↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y", "Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) * cov_X +", "not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: cov_X, x_bar,", "print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X,", "n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = self.calculate_hotelling(Y) pooled = 1./(n_x+n_y-2.) *", "Kernel() def get_median_ls(self,X): with torch.no_grad(): d = self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod", "dim=1, keepdim=True) m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar", "`C_{ii}` is the variance of `x_i`. Args: m: A 1-D or 2-D array", "= m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X): cov_X,x_bar = self.cov(X) return", "variables. ''' if m.dim() > 2: raise ValueError('m has more than 2 dimensions')", "self.hotelling = False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes it... 
self.hotelling =", "kX = self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not", "_Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx,", "row of `m` represents a variable, and each column a single observation of", "print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T): kX", "print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x", "'''Estimate a covariance matrix given data. Covariance indicates the level to which two", "Log1PlusExp.apply def forward(self, x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__()", "represents a variable, with observations in the columns. Otherwise, the relationship is transposed:", "n_y - 2.) * (cov_X + cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1)", "if m.dim() > 2: raise ValueError('m has more than 2 dimensions') if m.dim()", "= m.view(1, -1) if not rowvar and m.size(0) != 1: m = m.t()", "indicates the level to which two variables vary together. 
If we examine N-dimensional", "torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\" @staticmethod", "inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:] tmp_dev =", "linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None):", "def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y = data[c,:] tmp_dev = X.device if not", "torch.mean(m, dim=1, keepdim=True) m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze() def calculate_hotelling(self, X):", "LinearKernel,MaternKernel,RBFKernel,Kernel from torch.nn.modules.loss import _Loss class Log1PlusExp(torch.autograd.Function): \"\"\"Implementation of x ↦ log(1 +", "m.t() # m = m.type(torch.double) # uncomment this line if desired m_mean =", "X.device if not self.hotelling: T_x,T_y,X,Y = self.get_sample_witness(X,Y) n_x = X.shape[0] n_y = Y.shape[0]", "lengthscale be fucking me... print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX)", "while the rows contain observations. Returns: The covariance matrix of the variables. '''", "1: m = m.t() # m = m.type(torch.double) # uncomment this line if", "else: _tmp = torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T)", "this line if desired m_mean = torch.mean(m, dim=1, keepdim=True) m = m -", "covariance matrix given data. Covariance indicates the level to which two variables vary", "has more than 2 dimensions') if m.dim() < 2: m = m.view(1, -1)", "= self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y) pooled = 1. 
/ (n_x", "return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False): '''Estimate a covariance matrix given", "in the columns. Otherwise, the relationship is transposed: each column represents a variable,", "True self.coeff = min(min(test_nx, test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X =", "T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig =", "each column a single observation of all those variables. rowvar: If `rowvar` is", "T_x, T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :]", "print(cov_X) print(cov_Y) print(k_X) print(k_Y) print(kX) print(kY) print(_tmp.min(),_tmp.max()) print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return", "self.kernel_base.covar_dist(X,X) return torch.sqrt(torch.median(d[d > 0])) @staticmethod def cov(m, rowvar=False): '''Estimate a covariance matrix", ":].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return T_x,T_y,X,Y def get_umap_stuff(self,X,Y,T):", "torch.randperm(n_x) idy = torch.randperm(n_y) J_x = round(n_x*self.ratio) J_y = round(n_y*self.ratio) T_x, T_y =", "1-D or 2-D array containing multiple variables and observations. Each row of `m`", "test_ny) ** asymp_n, 1e-5) self.kernel_base = Kernel() def get_median_ls(self,X): with torch.no_grad(): d =", "variable, with observations in the columns. 
Otherwise, the relationship is transposed: each column", "def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T)", "self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach() with torch.no_grad(): sig = self.get_median_ls(_tmp) self.kernel_X.raw_lengthscale =", "z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic =", "= torch.tensor(0) sig=0 cov_X,x_bar,k_X,kX = self.calculate_ME_hotelling(X, T) cov_Y,y_bar,k_Y,kY = self.calculate_ME_hotelling(Y, T) else: _tmp", "Y.shape[0] T = torch.cat([T_x, T_y],dim=0) if not self.kernel_type=='linear': _tmp = torch.cat([X, Y], dim=0).detach()", "= data[~c, :] Y = data[c, :] target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y])", "`X = [x_1, x_2, ... x_N]^T`, then the covariance matrix element `C_{ij}` is", "x, y): return torch.mean(self.f(x)-x*y) class linear_benchmark(nn.Module): def __init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective =", "T) else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY =", "= (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1 + y) class", "def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate() x_bar = torch.mean(kX, dim=0) k_X", "or 2-D array containing multiple variables and observations. 
Each row of `m` represents", "__init__(self,d): super(linear_benchmark, self).__init__() self.register_buffer('w',torch.ones(d)) self.objective = stableBCEwithlogits() def forward(self,data,c,debug_xi = None): X =", "target = torch.cat([torch.zeros(X.shape[0]),torch.ones(Y.shape[0])]).to(X.device) data = torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module):", "exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx, grad_output): x,", "torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) * torch.sum(z*inv_z)", "kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) elif kernel_type=='linear': self.kernel_X = LinearKernel()", "else: cov_X, x_bar, k_X, kX = self.calculate_hotelling(X) cov_Y, y_bar, k_Y, kY = self.calculate_hotelling(Y)", "log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp = x.exp() ctx.save_for_backward(x) y =", "of `x_i` and `x_j`. The element `C_{ii}` is the variance of `x_i`. Args:", "test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking me... 
print(test_statistic) print(x_bar) print(y_bar) print(inv_z) print(cov_X)", "print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate() x_bar =", "** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False)", "\"\"\"Implementation of x ↦ log(1 + exp(x)).\"\"\" @staticmethod def forward(ctx, x): exp =", "elif kernel_type=='linear': self.kernel_X = LinearKernel() self.kernel_X._set_variance(linear_var) elif kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale =", "(-x).exp() return grad_output / (1 + y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits,", "torch.cat([X,Y]) pred = (data@self.w).squeeze() return -self.objective(pred,target) class MEstat(nn.Module): def __init__(self,J,ls=10,test_nx=1,test_ny=1,asymp_n=-1,kernel_type = 'rbf',linear_var=1e-3): super(MEstat,", "* torch.sum(z*inv_z) if test_statistic.data ==0 or test_statistic==float('inf') or test_statistic!=test_statistic: #The lengthscale be fucking", "- y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x * n_y", "print(sig) print(n_x*n_y/(n_x + n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T): kX =", "def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f = Log1PlusExp.apply def forward(self, x, y): return", "exp = x.exp() ctx.save_for_backward(x) y = exp.log1p() return x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y", "n_y)) print(pooled) return test_statistic def calculate_ME_hotelling(self, X, T): kX = self.kernel_X(X, T).evaluate() x_bar", "n_x = X.shape[0] n_y = Y.shape[0] cov_X,x_bar,k_X,kX = self.calculate_hotelling(X) cov_Y,y_bar,k_Y,kY = 
self.calculate_hotelling(Y) pooled", "desired m_mean = torch.mean(m, dim=1, keepdim=True) m = m - m_mean return m.matmul(m.t()).squeeze(),m_mean.squeeze()", "kernel_type=='matern': self.kernel_X = MaternKernel(nu=2.5) self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(), requires_grad=False) self.coeff = min(min(test_nx, test_ny) **", "= 'rbf',linear_var=1e-3): super(MEstat, self).__init__() print(ls) self.ratio = J self.hotelling = False self.kernel_type =", "Covariance indicates the level to which two variables vary together. If we examine", "torch.unsqueeze(x_bar - y_bar, 1) inv_z,_ = torch.solve(z,pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(pooled.device)) test_statistic = n_x *", "if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output / (1 + y) class stableBCEwithlogits(_Loss): def", "/ (1 + y) class stableBCEwithlogits(_Loss): def __init__(self, reduction='mean'): super(stableBCEwithlogits, self).__init__(reduction=reduction) self.f =", "- 2.) * (cov_X + cov_Y) z = torch.unsqueeze(x_bar - y_bar, 1) inv_z,_", "self.kernel_X(X, T).evaluate() kY = self.kernel_X(Y,T).evaluate() return kX,kY,torch.cat([kX,kY],dim=0) def forward_plain(self,X,Y,T,n_x,n_y): if not self.hotelling: cov_X,x_bar,k_X,kX", "= torch.unsqueeze(x_bar-y_bar,1) inv_z,_ = torch.solve(z.float(),pooled.float() + self.coeff*torch.eye(pooled.shape[0]).float().to(tmp_dev)) test_statistic = n_x*n_y/(n_x + n_y) *", "x, = ctx.saved_tensors y = (-x).exp().half() if x.type()=='torch.cuda.HalfTensor' else (-x).exp() return grad_output /", "test_ny) ** asymp_n, 1e-2) else: if kernel_type=='rbf': self.kernel_X = RBFKernel() self.kernel_X.raw_lengthscale = nn.Parameter(torch.tensor([ls]).float(),", "= kernel_type if kernel_type=='hotelling': #Regularization fixes it... self.hotelling = True self.coeff = min(min(test_nx,", "False self.kernel_type = kernel_type if kernel_type=='hotelling': #Regularization fixes it... 
self.hotelling = True self.coeff", "x.where(torch.isinf(exp),y.half() if x.type()=='torch.cuda.HalfTensor' else y ) @staticmethod def backward(ctx, grad_output): x, = ctx.saved_tensors", "kernel_type=='hotelling': #Regularization fixes it... self.hotelling = True self.coeff = min(min(test_nx, test_ny) ** asymp_n,", "T_y = X[idx[:J_x], :].detach(), Y[idy[:J_y], :].detach() X,Y = X[idx[J_x:], :], Y[idy[J_y:], :] return", "* torch.sum(z * inv_z) return test_statistic def forward(self,data,c,debug_xi_hat=None): X = data[~c,:] Y =" ]
[ "from rembg.multiprocessing import parallel_greenscreen if __name__ == \"__main__\": parallel_greenscreen(\"/Users/zihao/Desktop/zero/video/group15B_Short.avi\", 3, 1, \"u2net_human_seg\", frame_limit=300)" ]
[ "for i in range(len(log_dat)-1): # извлечение данных из списка и распределение их по", "data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure() # графики изменения температуры", "plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number", "range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter += 1 if((counter == len(func_dat[element]) -", "1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] / 30", "+ iterator] data_temp_out[i] = data_temp_out[i] / 30 for i in range(lengthList3 - 30):", "1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] / 30", "- 30): # усреднение значений iterator = 0 for iterator_1 in range(30): iterator", "plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature,", "in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try: data = reading_data() except", "1 == 5)): log_dat.append(func_dat[element]) counter = 0 for i in range(len(log_dat)-1): # извлечение", "plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('pressure, kPa')", "iterator_1 in range(30): iterator += 1 data_pressure[i] = data_pressure[i] + data_pressure[i + iterator]", "line in f: if(line != '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция,", "sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = 
plt.figure() # графики изменения температуры внутри и", "plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312)", "[0] # финальный список данных о температуре окружающей среды data_pressure = [0] #", "def reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f:", "5)): log_dat.append(func_dat[element]) counter = 0 for i in range(len(log_dat)-1): # извлечение данных из", "<filename>Python/cube-can-sat-2016/soft/desktop/src/thermodynamic_parameters/src.py import matplotlib.pyplot as plt def reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\",", "i in range(len(log_dat)-1): # извлечение данных из списка и распределение их по трем", "IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): #", "plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('pressure, kPa') plt.grid() plt.tight_layout() plt.savefig(\"../../img/termo.png\",", "in range(lengthList1 - 30): # усреднение значений iterator = 0 for iterator_1 in", "def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2", "данных о температуре внутри аппарата data_temp_out = [0] # финальный список данных о", "аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points')", "plt.xlabel('Number of points') 
plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number", "element = log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2]) element[2] = (float(element[3]) +", "of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points')", "'9', ';'} for element in range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1): if(func_dat[element][i]", "'r') for line in f: if(line != '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat):", "plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10')", "except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure):", "# усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 =", "range(lengthList3 - 30): # усреднение значений iterator = 0 for iterator_1 in range(30):", "data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try: data = reading_data() except BaseException: print('ошибка", "func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные из списка, которые считываются", "усреднение значений iterator = 0 for iterator_1 in range(30): iterator += 1 data_temp_in[i]", "len(data_pressure) for i in range(lengthList1 - 30): # усреднение значений iterator = 0", "0 for iterator_1 in range(30): iterator += 1 data_pressure[i] = 
data_pressure[i] + data_pressure[i", "= (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1])", "data_temp_out[i] = data_temp_out[i] / 30 for i in range(lengthList3 - 30): # усреднение", "по трем другим element = log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2]) element[2]", "усреднение значений iterator = 0 for iterator_1 in range(30): iterator += 1 data_pressure[i]", "sort_data(func_dat): \"\"\" функция, парсящая данные из списка, которые считываются в другие списки \"\"\"", "celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('pressure, kPa') plt.grid()", "парсящая данные из списка, которые считываются в другие списки \"\"\" log_dat = []", "data_pressure[i] = data_pressure[i] / 30 for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data", "data_pressure[i] = data_pressure[i] + data_pressure[i + iterator] data_pressure[i] = data_pressure[i] / 30 for", "изменения температуры внутри и снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside')", "in range(30): iterator += 1 data_pressure[i] = data_pressure[i] + data_pressure[i + iterator] data_pressure[i]", "и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature,", "= data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] / 30 for i", "'6', '7', '8', '9', ';'} for element in range(len(func_dat)-1): for i in range(len(func_dat[element])", "распределение их по трем другим element = 
log_dat[i].split(';') element[0] = float(element[1]) element[1] =", "points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('pressure,", "+ float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in,", "трем другим element = log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2]) element[2] =", "as plt def reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line", "с датчика давления basic_elements = {'.', '0', '1', '2', '3', '4', '5', '6',", "i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try: data = reading_data()", "= sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure() # графики изменения температуры внутри", "изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius')", "(float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return", "open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line != '\\n'): func_dat.append(line) return func_dat def", "len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter = 0", "скользящая средняя) 
lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for i", "финальный список данных о температуре окружающей среды data_pressure = [0] # финальный список", "data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] / 30 for i in", "давления basic_elements = {'.', '0', '1', '2', '3', '4', '5', '6', '7', '8',", "# финальный список данных о температуре окружающей среды data_pressure = [0] # финальный", "i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter += 1 if((counter ==", "in range(30): iterator += 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i]", "'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ';'} for element", "plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure)", "plt.figure() # графики изменения температуры внутри и снаружи аппарата и изменения давления plt.subplot(311)", "= [] try: data = reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure", "BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig =", "plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313)", "# усреднение значений iterator = 0 for iterator_1 in range(30): iterator += 1", "try: data = reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data)", "element[0] = float(element[1]) element[1] = float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0 try: 
data_temp_in.append(element[0])", "данных с датчика давления basic_elements = {'.', '0', '1', '2', '3', '4', '5',", "из списка, которые считываются в другие списки \"\"\" log_dat = [] counter =", "= {'.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ';'}", "data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out,", "celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius')", "= 0 for iterator_1 in range(30): iterator += 1 data_temp_out[i] = data_temp_out[i] +", "iterator += 1 data_pressure[i] = data_pressure[i] + data_pressure[i + iterator] data_pressure[i] = data_pressure[i]", "[0] # финальный список данных с датчика давления basic_elements = {'.', '0', '1',", "data_temp_in[i] = data_temp_in[i] / 30 for i in range(lengthList2 - 30): # усреднение", "in basic_elements): counter += 1 if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) -", "data_temp_out, data_pressure) fig = plt.figure() # графики изменения температуры внутри и снаружи аппарата", "[] counter = 0 data_temp_in = [0] # финальный список данных о температуре", "списка, которые считываются в другие списки \"\"\" log_dat = [] counter = 0", "данных из списка и распределение их по трем другим element = log_dat[i].split(';') element[0]", "if(func_dat[element][i] in basic_elements): counter += 1 if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';'))", "iterator_1 in range(30): iterator += 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator]", "iterator] data_pressure[i] = data_pressure[i] / 30 for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop()", 
"/ 30 for i in range(lengthList3 - 30): # усреднение значений iterator =", "for i in range(lengthList3 - 30): # усреднение значений iterator = 0 for", "о температуре окружающей среды data_pressure = [0] # финальный список данных с датчика", "данных о температуре окружающей среды data_pressure = [0] # финальный список данных с", "fig = plt.figure() # графики изменения температуры внутри и снаружи аппарата и изменения", "len(data_temp_out) lengthList3 = len(data_pressure) for i in range(lengthList1 - 30): # усреднение значений", "финальный список данных о температуре внутри аппарата data_temp_out = [0] # финальный список", "'2', '3', '4', '5', '6', '7', '8', '9', ';'} for element in range(len(func_dat)-1):", "for i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter += 1 if((counter", "plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10')", "другие списки \"\"\" log_dat = [] counter = 0 data_temp_in = [0] #", "data_pressure) fig = plt.figure() # графики изменения температуры внутри и снаружи аппарата и", "data_pressure[i] / 30 for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = []", "try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure", "in range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter +=", "функция, парсящая данные из списка, которые считываются в другие списки \"\"\" log_dat =", "data_temp_out[i] / 30 for i in range(lengthList3 - 30): # усреднение значений iterator", "о температуре внутри аппарата 
data_temp_out = [0] # финальный список данных о температуре", "другим element = log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2]) element[2] = (float(element[3])", "'8', '9', ';'} for element in range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1):", "= open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line != '\\n'): func_dat.append(line) return func_dat", "= 0 for iterator_1 in range(30): iterator += 1 data_temp_in[i] = data_temp_in[i] +", "data_pressure[i + iterator] data_pressure[i] = data_pressure[i] / 30 for i in range(30): data_temp_in.pop()", "список данных с датчика давления basic_elements = {'.', '0', '1', '2', '3', '4',", "= data_pressure[i] + data_pressure[i + iterator] data_pressure[i] = data_pressure[i] / 30 for i", "data_pressure[i] + data_pressure[i + iterator] data_pressure[i] = data_pressure[i] / 30 for i in", "списка и распределение их по трем другим element = log_dat[i].split(';') element[0] = float(element[1])", "= data_temp_out[i] / 30 for i in range(lengthList3 - 30): # усреднение значений", "которые считываются в другие списки \"\"\" log_dat = [] counter = 0 data_temp_in", "in range(lengthList3 - 30): # усреднение значений iterator = 0 for iterator_1 in", "iterator += 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i]", "iterator] data_temp_out[i] = data_temp_out[i] / 30 for i in range(lengthList3 - 30): #", "их по трем другим element = log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2])", "basic_elements): counter += 1 if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1", "\"\"\" функция, парсящая данные из списка, которые считываются в другие списки \"\"\" log_dat", "доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure() #", 
"func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line !=", "in f: if(line != '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция, парсящая", "= [0] # финальный список данных о температуре внутри аппарата data_temp_out = [0]", "for element in range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements):", "i in range(lengthList3 - 30): # усреднение значений iterator = 0 for iterator_1", "= [0] # финальный список данных с датчика давления basic_elements = {'.', '0',", "lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for i in range(lengthList1", "data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out)", "counter = 0 data_temp_in = [0] # финальный список данных о температуре внутри", "# извлечение данных из списка и распределение их по трем другим element =", "basic_elements = {'.', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9',", "except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig", "for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try: data =", "plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure')", "log_dat.append(func_dat[element]) counter = 0 for i in range(len(log_dat)-1): # извлечение данных из списка", "in range(len(log_dat)-1): # извлечение данных из списка и распределение их по трем другим", "- 1) and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter = 0 for", "{'.', '0', '1', '2', '3', '4', '5', '6', 
'7', '8', '9', ';'} for", "data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in,", "';'} for element in range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in", "списки \"\"\" log_dat = [] counter = 0 data_temp_in = [0] # финальный", "30 for i in range(lengthList2 - 30): # усреднение значений iterator = 0", "значений iterator = 0 for iterator_1 in range(30): iterator += 1 data_pressure[i] =", "'7', '8', '9', ';'} for element in range(len(func_dat)-1): for i in range(len(func_dat[element]) -", "= data_pressure[i] / 30 for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data =", "+ iterator] data_pressure[i] = data_pressure[i] / 30 for i in range(30): data_temp_in.pop() data_temp_out.pop()", "if(line != '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные из", "data_temp_out.pop() data_pressure.pop() data = [] try: data = reading_data() except BaseException: print('ошибка доступа')", "[] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line != '\\n'): func_dat.append(line)", "# финальный список данных о температуре внутри аппарата data_temp_out = [0] # финальный", "plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of", "= float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError:", "0 data_temp_in = [0] # финальный список данных о температуре внутри аппарата data_temp_out", 
"1) and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter = 0 for i", "of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of", "float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out,", "data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] / 30 for i in range(lengthList2 -", "data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] / 30 for", "30 for i in range(lengthList3 - 30): # усреднение значений iterator = 0", "[] try: data = reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure =", "data_pressure): # усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3", "in range(30): iterator += 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i]", "извлечение данных из списка и распределение их по трем другим element = log_dat[i].split(';')", "iterator += 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i]", "== 5)): log_dat.append(func_dat[element]) counter = 0 for i in range(len(log_dat)-1): # извлечение данных", "plt def reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in", "data_temp_in[i] / 30 for i in range(lengthList2 - 30): # усреднение значений iterator", "усреднение значений iterator = 0 for iterator_1 in range(30): iterator += 1 data_temp_out[i]", "func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные 
из списка, которые считываются в другие", "in range(lengthList2 - 30): # усреднение значений iterator = 0 for iterator_1 in", "data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая", "финальный список данных с датчика давления basic_elements = {'.', '0', '1', '2', '3',", "\"\"\" log_dat = [] counter = 0 data_temp_in = [0] # финальный список", "range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter += 1", "30): # усреднение значений iterator = 0 for iterator_1 in range(30): iterator +=", "и снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number", "reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line", "plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out)", "iterator] data_temp_in[i] = data_temp_in[i] / 30 for i in range(lengthList2 - 30): #", "1 data_pressure[i] = data_pressure[i] + data_pressure[i + iterator] data_pressure[i] = data_pressure[i] / 30", "!= '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные из списка,", "plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10')", "из списка и распределение их по трем другим element = log_dat[i].split(';') element[0] =", "# финальный список данных с датчика давления basic_elements = {'.', 
'0', '1', '2',", "= reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out,", "- 1 == 5)): log_dat.append(func_dat[element]) counter = 0 for i in range(len(log_dat)-1): #", "data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure() # графики", "1): if(func_dat[element][i] in basic_elements): counter += 1 if((counter == len(func_dat[element]) - 1) and", "matplotlib.pyplot as plt def reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for", "def sort_data(func_dat): \"\"\" функция, парсящая данные из списка, которые считываются в другие списки", "данные из списка, которые считываются в другие списки \"\"\" log_dat = [] counter", "for i in range(lengthList1 - 30): # усреднение значений iterator = 0 for", "# графики изменения температуры внутри и снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in)", "data = [] try: data = reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out,", "f: if(line != '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные", "element in range(len(func_dat)-1): for i in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter", "+ iterator] data_temp_in[i] = data_temp_in[i] / 30 for i in range(lengthList2 - 30):", "снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of", "range(len(log_dat)-1): # извлечение данных из списка и распределение их по трем другим element", "= len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for i in range(lengthList1 -", "'1', '2', '3', '4', '5', '6', '7', '8', 
'9', ';'} for element in", "+= 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] /", "= 0 data_temp_in = [0] # финальный список данных о температуре внутри аппарата", "графики изменения температуры внутри и снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature", "for iterator_1 in range(30): iterator += 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i +", "i in range(lengthList2 - 30): # усреднение значений iterator = 0 for iterator_1", "data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение", "значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for", "список данных о температуре внутри аппарата data_temp_out = [0] # финальный список данных", "data_pressure = [0] # финальный список данных с датчика давления basic_elements = {'.',", "30 for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try: data", "float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1])", "усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure)", "средняя) lengthList1 = len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for i in", "+ data_pressure[i + iterator] data_pressure[i] = data_pressure[i] / 30 for i in range(30):", "== len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter =", "data_pressure.pop() data = [] try: data 
= reading_data() except BaseException: print('ошибка доступа') data_temp_in,", "= len(data_temp_out) lengthList3 = len(data_pressure) for i in range(lengthList1 - 30): # усреднение", "+ data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] / 30 for i in range(lengthList2", "0 for iterator_1 in range(30): iterator += 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i", "= [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line != '\\n'):", "data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая", "data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя) lengthList1", "iterator = 0 for iterator_1 in range(30): iterator += 1 data_pressure[i] = data_pressure[i]", "points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points')", "range(30): iterator += 1 data_pressure[i] = data_pressure[i] + data_pressure[i + iterator] data_pressure[i] =", "(len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter = 0 for i in range(len(log_dat)-1):", "data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] / 30 for i in", "in range(len(func_dat[element]) - 1): if(func_dat[element][i] in basic_elements): counter += 1 if((counter == len(func_dat[element])", "iterator = 0 for iterator_1 in range(30): iterator += 1 data_temp_in[i] = data_temp_in[i]", "plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('pressure, kPa') plt.grid() plt.tight_layout()", "= 
len(data_pressure) for i in range(lengthList1 - 30): # усреднение значений iterator =", "+= 1 if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 == 5)):", "+= 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] /", "data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] / 30 for i in range(lengthList3 -", "0 for iterator_1 in range(30): iterator += 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i", "len(data_temp_in) lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for i in range(lengthList1 - 30):", "plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('pressure, kPa') plt.grid() plt.tight_layout() plt.savefig(\"../../img/termo.png\", fmt='png')", "= log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0", "for i in range(lengthList2 - 30): # усреднение значений iterator = 0 for", "range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try: data = reading_data() except BaseException:", "+ data_temp_out[i + iterator] data_temp_out[i] = data_temp_out[i] / 30 for i in range(lengthList3", "+= 1 data_pressure[i] = data_pressure[i] + data_pressure[i + iterator] data_pressure[i] = data_pressure[i] /", "lengthList3 = len(data_pressure) for i in range(lengthList1 - 30): # усреднение значений iterator", "reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure)", "и распределение их по трем другим element = log_dat[i].split(';') element[0] = float(element[1]) element[1]", "lengthList2 = len(data_temp_out) lengthList3 = len(data_pressure) for i in range(lengthList1 - 30): #", "print('ошибка доступа') data_temp_in, data_temp_out, 
data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure()", "i in range(lengthList1 - 30): # усреднение значений iterator = 0 for iterator_1", "and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter = 0 for i in", "внутри аппарата data_temp_out = [0] # финальный список данных о температуре окружающей среды", "inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside')", "'\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные из списка, которые", "iterator = 0 for iterator_1 in range(30): iterator += 1 data_temp_out[i] = data_temp_out[i]", "plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid()", "import matplotlib.pyplot as plt def reading_data(): func_dat = [] f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r')", "список данных о температуре окружающей среды data_pressure = [0] # финальный список данных", "iterator_1 in range(30): iterator += 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator]", "data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] / 30 for", "range(lengthList1 - 30): # усреднение значений iterator = 0 for iterator_1 in range(30):", "[0] # финальный список данных о температуре внутри аппарата data_temp_out = [0] #", "float(element[1]) element[1] = float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2])", "= 0 for i in range(len(log_dat)-1): # извлечение данных из списка и распределение", "= 
data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] = data_temp_in[i] / 30 for i", "в другие списки \"\"\" log_dat = [] counter = 0 data_temp_in = [0]", "температуре внутри аппарата data_temp_out = [0] # финальный список данных о температуре окружающей", "counter += 1 if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 ==", "data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1]) data_pressure.append(data_pressure[-1]) return data_temp_in, data_temp_out, data_pressure def", "for iterator_1 in range(30): iterator += 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i +", "= plt.figure() # графики изменения температуры внутри и снаружи аппарата и изменения давления", "= float(element[1]) element[1] = float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1])", "log_dat = [] counter = 0 data_temp_in = [0] # финальный список данных", "f = open(\"/home/misha91908/cube-can-sat-2016/soft/desktop/log/MEDIUM.TXT\", 'r') for line in f: if(line != '\\n'): func_dat.append(line) return", "element[2] = (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except IndexError: data_temp_in.append(data_temp_in[-1]) data_temp_out.append(data_temp_out[-1])", "давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid()", "data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in)", "log_dat[i].split(';') element[0] = float(element[1]) element[1] = float(element[2]) element[2] = 
(float(element[3]) + float(element[4]))/2.0 try:", "внутри и снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10')", "'3', '4', '5', '6', '7', '8', '9', ';'} for element in range(len(func_dat)-1): for", "plt.title('Temperature inside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(312) plt.plot(data_temp_out) plt.title('Temperature", "element[1] = float(element[2]) element[2] = (float(element[3]) + float(element[4]))/2.0 try: data_temp_in.append(element[0]) data_temp_out.append(element[1]) data_pressure.append(element[2]) except", "data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя) lengthList1 =", "outside') plt.xticks(fontsize='10') plt.yticks(fontsize='10') plt.xlabel('Number of points') plt.ylabel('temperature, celsius') plt.grid() plt.subplot(313) plt.plot(data_pressure) plt.title('Pressure') plt.xticks(fontsize='10')", "= [0] # финальный список данных о температуре окружающей среды data_pressure = [0]", "data = reading_data() except BaseException: print('ошибка доступа') data_temp_in, data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in,", "значений iterator = 0 for iterator_1 in range(30): iterator += 1 data_temp_out[i] =", "среды data_pressure = [0] # финальный список данных с датчика давления basic_elements =", "/ 30 for i in range(30): data_temp_in.pop() data_temp_out.pop() data_pressure.pop() data = [] try:", "окружающей среды data_pressure = [0] # финальный список данных с датчика давления basic_elements", "simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure() # графики изменения температуры внутри и снаружи", "'4', '5', '6', '7', '8', '9', ';'} for element in range(len(func_dat)-1): for i", 
"data_temp_in = [0] # финальный список данных о температуре внутри аппарата data_temp_out =", "= data_temp_in[i] / 30 for i in range(lengthList2 - 30): # усреднение значений", "= 0 for iterator_1 in range(30): iterator += 1 data_pressure[i] = data_pressure[i] +", "значений iterator = 0 for iterator_1 in range(30): iterator += 1 data_temp_in[i] =", "for line in f: if(line != '\\n'): func_dat.append(line) return func_dat def sort_data(func_dat): \"\"\"", "температуре окружающей среды data_pressure = [0] # финальный список данных с датчика давления", "датчика давления basic_elements = {'.', '0', '1', '2', '3', '4', '5', '6', '7',", "температуры внутри и снаружи аппарата и изменения давления plt.subplot(311) plt.plot(data_temp_in) plt.title('Temperature inside') plt.xticks(fontsize='10')", "0 for i in range(len(log_dat)-1): # извлечение данных из списка и распределение их", "data_temp_out = [0] # финальный список данных о температуре окружающей среды data_pressure =", "simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя) lengthList1 = len(data_temp_in) lengthList2 =", "data_temp_out, data_pressure = sort_data(data) simple_moving_average(data_temp_in, data_temp_out, data_pressure) fig = plt.figure() # графики изменения", "/ 30 for i in range(lengthList2 - 30): # усреднение значений iterator =", "range(30): iterator += 1 data_temp_out[i] = data_temp_out[i] + data_temp_out[i + iterator] data_temp_out[i] =", "return data_temp_in, data_temp_out, data_pressure def simple_moving_average(data_temp_in, data_temp_out, data_pressure): # усреднение значений(простая скользящая средняя)", "counter = 0 for i in range(len(log_dat)-1): # извлечение данных из списка и", "аппарата data_temp_out = [0] # финальный список данных о температуре окружающей среды data_pressure", "if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element]) counter", 
"range(lengthList2 - 30): # усреднение значений iterator = 0 for iterator_1 in range(30):", "= [] counter = 0 data_temp_in = [0] # финальный список данных о", "1 if((counter == len(func_dat[element]) - 1) and (len(func_dat[element].split(';')) - 1 == 5)): log_dat.append(func_dat[element])", "считываются в другие списки \"\"\" log_dat = [] counter = 0 data_temp_in =", "for iterator_1 in range(30): iterator += 1 data_pressure[i] = data_pressure[i] + data_pressure[i +", "range(30): iterator += 1 data_temp_in[i] = data_temp_in[i] + data_temp_in[i + iterator] data_temp_in[i] =", "'5', '6', '7', '8', '9', ';'} for element in range(len(func_dat)-1): for i in", "return func_dat def sort_data(func_dat): \"\"\" функция, парсящая данные из списка, которые считываются в", "- 1): if(func_dat[element][i] in basic_elements): counter += 1 if((counter == len(func_dat[element]) - 1)" ]
[ "url in self.start_urls: print url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item", "item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name']", "= 0.9, image / webp, image / apng, * / *;q = 0.8',", "br', 'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in range(1, 40002):", "xhtml + xml, application / xml;q = 0.9, image / webp, image /", "= 'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url = url_head + '%s.html' %", "+ '%s.html' % i self.start_urls.append(url) for url in self.start_urls: print url yield scrapy.Request(url,", "item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] = content[6][content[6].index(':')+1:] item['tel'] = content[7][content[7].index(':')+1:] yield", "for url in self.start_urls: print url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response):", "= content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] = content[6][content[6].index(':')+1:] item['tel'] = content[7][content[7].index(':')+1:] yield item", "headers = { 'Accept': 'text / html, application / xhtml + xml, application", "SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = [] headers = {", "= ['qieta.com'] start_urls = [] headers = { 'Accept': 'text / html, application", "item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] = content[6][content[6].index(':')+1:] item['tel']", "application / xhtml + xml, application / xml;q = 0.9, image / webp,", "reload(sys) 
sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = []", "*;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-'", "= name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] =", "coding: utf-8 -*- import scrapy from webspider.items import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8')", "response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:]", "i self.start_urls.append(url) for url in self.start_urls: print url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def", "/ webp, image / apng, * / *;q = 0.8', 'Accept-Encoding': 'deflate, br',", "response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')]", "'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in", "/ *;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head =", "range(1, 40002): url = url_head + '%s.html' % i self.start_urls.append(url) for url in", "'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = [] headers = { 'Accept': 'text /", "% i self.start_urls.append(url) for url in self.start_urls: print url yield scrapy.Request(url, callback=self.parse, headers=self.headers)", "-*- coding: utf-8 -*- import scrapy from webspider.items import SheJiYuanItem import sys reload(sys)", "= { 
'Accept': 'text / html, application / xhtml + xml, application /", "# -*- coding: utf-8 -*- import scrapy from webspider.items import SheJiYuanItem import sys", "image / apng, * / *;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'}", "/ xml;q = 0.9, image / webp, image / apng, * / *;q", "for i in range(1, 40002): url = url_head + '%s.html' % i self.start_urls.append(url)", "0.9, image / webp, image / apng, * / *;q = 0.8', 'Accept-Encoding':", "'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in range(1,", "allowed_domains = ['qieta.com'] start_urls = [] headers = { 'Accept': 'text / html,", "url_head = 'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url = url_head + '%s.html'", "application / xml;q = 0.9, image / webp, image / apng, * /", "= 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = [] headers = { 'Accept': 'text", "content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name item['foundTime']", "start_urls = [] headers = { 'Accept': 'text / html, application / xhtml", "headers=self.headers) def parse(self, response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0]", "= content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] =", "SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name", "import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class 
SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains =", "parse(self, response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name =", "* / *;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head", "-*- import scrapy from webspider.items import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider):", "name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type']", "sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = [] headers", "/ xhtml + xml, application / xml;q = 0.9, image / webp, image", "print url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item = SheJiYuanItem() content", "= response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] =", "yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract()", "start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url = url_head +", "xml, application / xml;q = 0.9, image / webp, image / apng, *", "image / webp, image / apng, * / *;q = 0.8', 'Accept-Encoding': 'deflate,", "sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls =", "self.start_urls.append(url) for url in self.start_urls: print url yield scrapy.Request(url, 
callback=self.parse, headers=self.headers) def parse(self,", "'Accept': 'text / html, application / xhtml + xml, application / xml;q =", "self.start_urls: print url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item = SheJiYuanItem()", "= response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name item['foundTime'] =", "['qieta.com'] start_urls = [] headers = { 'Accept': 'text / html, application /", "'text / html, application / xhtml + xml, application / xml;q = 0.9,", "apng, * / *;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self):", "[] headers = { 'Accept': 'text / html, application / xhtml + xml,", "url = url_head + '%s.html' % i self.start_urls.append(url) for url in self.start_urls: print", "content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] = content[6][content[6].index(':')+1:] item['tel'] = content[7][content[7].index(':')+1:]", "content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] = content[6][content[6].index(':')+1:]", "in range(1, 40002): url = url_head + '%s.html' % i self.start_urls.append(url) for url", "html, application / xhtml + xml, application / xml;q = 0.9, image /", "url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item = SheJiYuanItem() content =", "scrapy from webspider.items import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name =", "utf-8 -*- import scrapy from webspider.items import SheJiYuanItem import sys reload(sys) 
sys.setdefaultencoding('utf-8') class", "'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url = url_head + '%s.html' % i", "name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:]", "in self.start_urls: print url yield scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item =", "class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = [] headers =", "/ apng, * / *;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def", "webp, image / apng, * / *;q = 0.8', 'Accept-Encoding': 'deflate, br', 'Conection':", "def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url = url_head", "scrapy.Request(url, callback=self.parse, headers=self.headers) def parse(self, response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name", "= name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] =", "/ html, application / xhtml + xml, application / xml;q = 0.9, image", "0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i", "import scrapy from webspider.items import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name", "name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls = [] headers = { 'Accept':", "i in range(1, 40002): url = url_head + '%s.html' % i self.start_urls.append(url) for", "name = name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = 
content[2][content[2].index(':')+1:] item['area']", "= [] headers = { 'Accept': 'text / html, application / xhtml +", "name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:]", "def parse(self, response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name", "= url_head + '%s.html' % i self.start_urls.append(url) for url in self.start_urls: print url", "= 0.8', 'Accept-Encoding': 'deflate, br', 'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for", "= SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name = response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] =", "webspider.items import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains", "'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url =", "SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com']", "url_head + '%s.html' % i self.start_urls.append(url) for url in self.start_urls: print url yield", "'%s.html' % i self.start_urls.append(url) for url in self.start_urls: print url yield scrapy.Request(url, callback=self.parse,", "+ xml, application / xml;q = 0.9, image / webp, image / apng,", "= content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts'] = content[6][content[6].index(':')+1:] item['tel'] =", 
"response.xpath('//h1[@class=\"title\"]/text()').extract()[0] name = name[:name.index('(')] item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:]", "from webspider.items import SheJiYuanItem import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider'", "'Conection': 'keep-alive'} def start_requests(self): url_head = 'http://old.qieta.com/engineering/show-' for i in range(1, 40002): url", "callback=self.parse, headers=self.headers) def parse(self, response): item = SheJiYuanItem() content = response.xpath('//div[@class=\"content\"]/div/text()').extract() name =", "{ 'Accept': 'text / html, application / xhtml + xml, application / xml;q", "import sys reload(sys) sys.setdefaultencoding('utf-8') class SheJiYuanSpider(scrapy.Spider): name = 'SheJiYuanSpider' allowed_domains = ['qieta.com'] start_urls", "item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address'] = content[4][content[4].index(':')+1:] item['contacts']", "40002): url = url_head + '%s.html' % i self.start_urls.append(url) for url in self.start_urls:", "item['name'] = name item['foundTime'] = content[1][content[1].index(':')+1:] item['type'] = content[2][content[2].index(':')+1:] item['area'] = content[3][content[3].index(':')+1:] item['address']", "xml;q = 0.9, image / webp, image / apng, * / *;q =" ]
[ "rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores =", "tile.letter == ch) expectedScore = expectedScore + t.score expectedScore = expectedScore * 2", "expectedScore = 0 for ch in word: t = next(tile for tile in", "tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return execute class WordPlayTests(TestCase): def", "TestCase from context import * import pdb import pprint def write(l): w =", "Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji')", "= self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] == True, \"should", "<reponame>deostroll/sengine from unittest import TestCase from context import * import pdb import pprint", "unittest import TestCase from context import * import pdb import pprint def write(l):", "+ pprint.pformat(locals())) else: func(inp) return execute class WordPlayTests(TestCase): def setUp(self): self.board = Board(15)", "game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def", "def test_first_turn_score_exception(self): game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters)", "player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb =", "tiles and their positions # on the board def createTile(l): return Tile(l, game.letter_scores[l])", "rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores =", "def test_first_turn_positive(self): game = 
self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c')", "self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb", "lqueue is the queue of tiles and their positions # on the board", "= self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') #", "self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res =", "game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'],", "game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\") def", "= game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\") def test_first_turn_positive(self): game = self.game", "self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self): game = self.game player = self.player rackLetters", "'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore = 0 for ch in word: t", "self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] ==", "= game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game = self.game # lqueue", "Board(15) self.game = Game(self.board) rack = Rack(7) self.player = Player('dumy', rack) 
self.game.setPlayer(self.player) def", "w = open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def testWrap(tc, func): def execute(inp,", "game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6,", "return execute class WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game = Game(self.board) rack", "self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn()", "next(tile for tile in tileDb['tiles'] if tile.letter == ch) expectedScore = expectedScore +", "game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore = 0 for ch in word: t =", "expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game player", "= self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result']", "game.setPosition((6, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' res =", "'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game = self.game", "= game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self): game = self.game player =", "'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should", "game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept 
turn\")", "for ch in word: t = next(tile for tile in tileDb['tiles'] if tile.letter", "[ (createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)),", "6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)), ] score =", "player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res", "player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb =", "= 0 for ch in word: t = next(tile for tile in tileDb['tiles']", "rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal')", "loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should be", "the board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)),", "tileDb['tiles'] if tile.letter == ch) expectedScore = expectedScore + t.score expectedScore = expectedScore", "self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game player = self.player rackLetters = 'sinaete'", "= expectedScore + t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self):", "(createTile('e'), (7, 6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)), ]", "self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self): game = self.game player = self.player", "expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game player = self.player", "word = 'tense' res = 
game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game", "execute class WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game = Game(self.board) rack =", "import * import pdb import pprint def write(l): w = open('debug.txt', 'a') w.write(str(l)", "game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense')", "self.game # lqueue is the queue of tiles and their positions # on", "= 'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game =", "player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] == True,", "word = 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore = 0 for ch in", "rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense'", "game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept", "Game(self.board) rack = Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game =", "def test_compute_lqueue_score(self): game = self.game # lqueue is the queue of tiles and", "(createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)), ] score = game._computeQueue(lqueue)", "# lqueue is the queue of tiles and their positions # on the", "import TestCase from context import * import pdb import pprint def write(l): w", "else: func(inp) return execute class WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game =", 
"self.game.setPlayer(self.player) def test_word_play(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5))", "def write(l): w = open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def testWrap(tc, func):", "4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore =", "== True, \"should fail\") def test_first_turn_negative(self): game = self.game player = self.player game.fillRack(player.rack,", "turn\") def test_first_turn_score(self): game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack,", "game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word =", "game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self): game", "\"should not accept\") def test_first_turn_positive(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji')", "test_first_turn_score(self): game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal')", "== ch) expectedScore = expectedScore + t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore,", "pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self): game = self.game", "= loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore", "test_compute_lqueue_score(self): game = self.game # lqueue is the queue of tiles and their", "game = self.game # lqueue is the queue of tiles and their positions", "\"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return execute class 
WordPlayTests(TestCase): def setUp(self): self.board", "game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i')", "positions # on the board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [", "0 for ch in word: t = next(tile for tile in tileDb['tiles'] if", "game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self): game", "7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)), ] score = game._computeQueue(lqueue) self.assertEquals(score, 10)", "for tile in tileDb['tiles'] if tile.letter == ch) expectedScore = expectedScore + t.score", "from context import * import pdb import pprint def write(l): w = open('debug.txt',", "= self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles()", "game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' res", "false\") def test_compute_lqueue_score(self): game = self.game # lqueue is the queue of tiles", "self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores", "tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score']", "= expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game player =", "* 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game player = self.player rackLetters", "board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)), (createTile('e'),", "= 
'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score']", "res = game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\") def test_first_turn_positive(self): game =", "game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False,", "(createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'),", "hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return", "game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word =", "'\\n') w.close() def testWrap(tc, func): def execute(inp, expected, hasReturn=True): if hasReturn: actual =", "game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\") def test_first_turn_positive(self):", "game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game = self.game # lqueue is", "t = next(tile for tile in tileDb['tiles'] if tile.letter == ch) expectedScore =", "result = game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self): game = self.game", "expectedScore = expectedScore + t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore) def", "game.playLetter('h') game.playLetter('i') # pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self):", "tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore = 
game.getCurrentScore()['score'] expectedScore = 0 for ch", "from unittest import TestCase from context import * import pdb import pprint def", "= game.getCurrentScore()['score'] expectedScore = 0 for ch in word: t = next(tile for", "self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace()", "\\n\" + pprint.pformat(locals())) else: func(inp) return execute class WordPlayTests(TestCase): def setUp(self): self.board =", "game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7,", "if hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp)", "tile_scores = tileDb['letter_score'] word = 'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should be false\")", "+ t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game =", "func): def execute(inp, expected, hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail:", "res = game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self): game = self.game player", "w.write(str(l) + '\\n') w.close() def testWrap(tc, func): def execute(inp, expected, hasReturn=True): if hasReturn:", "the queue of tiles and their positions # on the board def createTile(l):", "game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi')", "= 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore = 0 for ch in word:", "'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') 
self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self):", "game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False, \"should not", "game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\") def", "\"should be false\") def test_compute_lqueue_score(self): game = self.game # lqueue is the queue", "game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i')", "accept\") def test_first_turn_positive(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5))", "False, \"should not accept\") def test_first_turn_positive(self): game = self.game player = self.player game.fillRack(player.rack,", "rack = Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game", "game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def test_first_turn_score(self): game = self.game player = self.player", "expected, hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals()))", "def test_first_turn_score(self): game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters)", "test_first_turn_positive(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h')", "return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'),", "context 
import * import pdb import pprint def write(l): w = open('debug.txt', 'a')", "rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense'", "func(inp) return execute class WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game = Game(self.board)", "expectedScore + t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game", "test_first_turn_negative(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h')", "self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\")", "game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)),", "= game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self): game = self.game player", "= 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score']", "self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] ==", "queue of tiles and their positions # on the board def createTile(l): return", "import pdb import pprint def write(l): w = open('debug.txt', 'a') w.write(str(l) + '\\n')", "loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore =", "= next(tile for tile in tileDb['tiles'] if tile.letter == ch) expectedScore = expectedScore", 
"lqueue = [ (createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)), (createTile('s'),", "tile in tileDb['tiles'] if tile.letter == ch) expectedScore = expectedScore + t.score expectedScore", "if tile.letter == ch) expectedScore = expectedScore + t.score expectedScore = expectedScore *", "(7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)), ] score = game._computeQueue(lqueue) self.assertEquals(score,", "be false\") def test_compute_lqueue_score(self): game = self.game # lqueue is the queue of", "game.getCurrentScore()['score'] expectedScore = 0 for ch in word: t = next(tile for tile", "\"should accept turn\") def test_first_turn_score(self): game = self.game player = self.player rackLetters =", "their positions # on the board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue =", "res = game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game = self.game #", "def setUp(self): self.board = Board(15) self.game = Game(self.board) rack = Rack(7) self.player =", "= self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4))", "Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game player =", "= self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') # pdb.set_trace() res =", "= loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should", "test_word_play(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result =", "# pdb.set_trace() res = game.endTurn() self.assertTrue(res['result'], \"should accept turn\") def 
test_first_turn_score(self): game =", "setUp(self): self.board = Board(15) self.game = Game(self.board) rack = Rack(7) self.player = Player('dumy',", "game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\") def test_first_turn_positive(self): game", "in word: t = next(tile for tile in tileDb['tiles'] if tile.letter == ch)", "self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb", "t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game", "= self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4))", "on the board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7,", "'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word", "open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def testWrap(tc, func): def execute(inp, expected, hasReturn=True):", "fail\") def test_first_turn_negative(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5))", "expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return execute class WordPlayTests(TestCase): def setUp(self):", "actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return execute", "tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore = 0", "5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)),", "self.assertTrue(res['result'] == False, 
\"should not accept\") def test_first_turn_positive(self): game = self.game player =", "def execute(inp, expected, hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\"", "w.close() def testWrap(tc, func): def execute(inp, expected, hasReturn=True): if hasReturn: actual = func(inp)", "pprint def write(l): w = open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def testWrap(tc,", "game.setPosition((7, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore", "write(l): w = open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def testWrap(tc, func): def", "expectedScore) def test_first_turn_score_exception(self): game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack,", "test_first_turn_score_exception(self): game = self.game player = self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal')", "* import pdb import pprint def write(l): w = open('debug.txt', 'a') w.write(str(l) +", "def test_first_turn_negative(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c')", "accept turn\") def test_first_turn_score(self): game = self.game player = self.player rackLetters = 'sinaete'", "in tileDb['tiles'] if tile.letter == ch) expectedScore = expectedScore + t.score expectedScore =", "+ '\\n') w.close() def testWrap(tc, func): def execute(inp, expected, hasReturn=True): if hasReturn: actual", "'a') w.write(str(l) + '\\n') w.close() def testWrap(tc, func): def execute(inp, expected, hasReturn=True): if", "= Board(15) self.game = Game(self.board) rack = Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player)", "= self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') 
game.playLetter('h') game.playLetter('i') res", "not accept\") def test_first_turn_positive(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal')", "ch) expectedScore = expectedScore + t.score expectedScore = expectedScore * 2 self.assertEqual(actualScore, expectedScore)", "= self.game # lqueue is the queue of tiles and their positions #", "class WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game = Game(self.board) rack = Rack(7)", "is the queue of tiles and their positions # on the board def", "actualScore = game.getCurrentScore()['score'] expectedScore = 0 for ch in word: t = next(tile", "import pprint def write(l): w = open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def", "hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else:", "= Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game player = self.player game.fillRack(player.rack,", "2 self.assertEqual(actualScore, expectedScore) def test_first_turn_score_exception(self): game = self.game player = self.player rackLetters =", "execute(inp, expected, hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" +", "= tileDb['letter_score'] word = 'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def", "tileDb['letter_score'] word = 'tense' res = game.playWord('tense') self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self):", "== False, \"should not accept\") def test_first_turn_positive(self): game = self.game player = self.player", "= self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((7, 4)) tileDb = loadTiles()", "tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' res = game.playWord('tense') 
self.assertFalse(res['result'],", "(7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7,", "(7, 6)), (createTile('n'), (7, 7)), (createTile('s'), (7, 8)), (createTile('e'), (7, 9)), ] score", "self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game player = self.player", "WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game = Game(self.board) rack = Rack(7) self.player", "= func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return execute class", "self.game = Game(self.board) rack = Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self):", "= tileDb['letter_score'] word = 'tense' game.playWord('tense') actualScore = game.getCurrentScore()['score'] expectedScore = 0 for", "4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word = 'tense' res = game.playWord('tense')", "word: t = next(tile for tile in tileDb['tiles'] if tile.letter == ch) expectedScore", "ch in word: t = next(tile for tile in tileDb['tiles'] if tile.letter ==", "self.assertFalse(res['result'], \"should be false\") def test_compute_lqueue_score(self): game = self.game # lqueue is the", "# on the board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'),", "player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn()", "Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7,", "= open('debug.txt', 'a') w.write(str(l) + '\\n') w.close() def testWrap(tc, func): def execute(inp, expected,", "= Game(self.board) rack = Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game", 
"game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\") def test_first_turn_positive(self): game = self.game player", "= Rack(7) self.player = Player('dumy', rack) self.game.setPlayer(self.player) def test_word_play(self): game = self.game player", "self.player rackLetters = 'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores", "= self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result']", "'sinaete' game.fillRack(player.rack, rackLetters) game.setOrientation('horizontal') game.setPosition((6, 4)) tileDb = loadTiles() tile_scores = tileDb['letter_score'] word", "func(inp) tc.assertEqual(actual, expected, \"fail: \\n\" + pprint.pformat(locals())) else: func(inp) return execute class WordPlayTests(TestCase):", "def testWrap(tc, func): def execute(inp, expected, hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual,", "game.setPosition((7,5)) result = game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self): game =", "testWrap(tc, func): def execute(inp, expected, hasReturn=True): if hasReturn: actual = func(inp) tc.assertEqual(actual, expected,", "True, \"should fail\") def test_first_turn_negative(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji')", "game.playWord('chi') self.assertTrue(result['result'] == True, \"should fail\") def test_first_turn_negative(self): game = self.game player =", "pprint.pformat(locals())) else: func(inp) return execute class WordPlayTests(TestCase): def setUp(self): self.board = Board(15) self.game", "self.board = Board(15) self.game = Game(self.board) rack = Rack(7) self.player = Player('dumy', rack)", "= [ (createTile('t'), (7, 5)), (createTile('e'), (7, 6)), (createTile('n'), (7, 7)), 
(createTile('s'), (7,", "\"should fail\") def test_first_turn_negative(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal')", "'cxghiji') game.setOrientation('horizontal') game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False, \"should", "game.setPosition((6,5)) game.playLetter('c') game.playLetter('h') game.playLetter('i') res = game.endTurn() self.assertTrue(res['result'] == False, \"should not accept\")", "pdb import pprint def write(l): w = open('debug.txt', 'a') w.write(str(l) + '\\n') w.close()", "def test_word_play(self): game = self.game player = self.player game.fillRack(player.rack, 'cxghiji') game.setOrientation('horizontal') game.setPosition((7,5)) result", "def createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)), (createTile('e'), (7,", "and their positions # on the board def createTile(l): return Tile(l, game.letter_scores[l]) lqueue", "of tiles and their positions # on the board def createTile(l): return Tile(l,", "createTile(l): return Tile(l, game.letter_scores[l]) lqueue = [ (createTile('t'), (7, 5)), (createTile('e'), (7, 6))," ]
[ "geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS',", "'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23',", "true #'line_file':'../input/lines.csv', #maybe is better to take out the function to_GIS from pyamesh", "'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN',", "'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21',", "16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30,", "'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just one plot", "2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\", "2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50],", "run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the ahuachapan model has another mesh setup", "10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2',", ", 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11,", "'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", 
"'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25',", "'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS',", "'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'',", "plot #to_GIS and plot_all_GIS it plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe is", "one plot #to_GIS and plot_all_GIS it plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe", "'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19',", "} #to_GIS does just one plot #to_GIS and plot_all_GIS it plots everything #try", "'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37,", "'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS',", "12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6},", "10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS',", "'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24',", "#try polygon true #'line_file':'../input/lines.csv', #maybe is better to take out the function to_GIS", "'T':350, 'X':0.1, 'DELTEN':-1, 
'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04,", "'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37,", "'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82,", "'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV',", "'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32',", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6},", "'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1),", "'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22',", "'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B',", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83,", "'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV',", "'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 
'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14',", "'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30],", "'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\", "'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'],", "#'line_file':'../input/lines.csv', #maybe is better to take out the function to_GIS from pyamesh and", "to take out the function to_GIS from pyamesh and run it alone #For", "'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "and plot_all_GIS it plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe is better to", "'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'',", "'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08,", "'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 
'TYPE':'DELV',", "'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2',", "'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A',", "'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False,", "import numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100],", "'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50,", "'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "#to_GIS does just one plot #to_GIS and plot_all_GIS it plots everything #try polygon", "import datetime, timedelta import numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db',", "and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the ahuachapan model has another mesh", "'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 
'GX':37, 'EX':1.1E6},", "'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16,", "}, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6',", "'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1,", "'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250,", "'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B',", "'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6,", "'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000,", "input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60],", "'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10,", "'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A',", "'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1),", "], 
'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000,", "datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100,", "17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6},", "datetime, timedelta import numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100],", "}, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1',", "plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe is better to take out the", "'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS',", "'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4,", "'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False,", "'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5',", "'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'],", "'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), 
datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, },", "6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\", "'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29',", "'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\", "'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", "'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1',", "#'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500,", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS", "'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16',", "'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5,", "'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B',", "'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "'AH-23', 'AH-24', 'AH-25', 
'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C',", "7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}}", "'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4',", "'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 },", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC',", "just one plot #to_GIS and plot_all_GIS it plots everything #try polygon true #'line_file':'../input/lines.csv',", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC',", "'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4',", "'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B',", "'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{", "'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False,", "'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 
6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\", "'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV',", "{'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)},", "the function to_GIS from pyamesh and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the", "'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34',", ", 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11,", "'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9',", "'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A',", "}, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2,", "18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN',", "'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03,", "'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1,", ", 'Xmin':404000, 
'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250,", "is better to take out the function to_GIS from pyamesh and run it", "timedelta import numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B',", "'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28',", "'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37,", "19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12,", "'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8',", "'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[", "better to take out the function to_GIS from pyamesh and run it alone", "'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False,", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just one plot #to_GIS and plot_all_GIS", "out the function to_GIS from pyamesh and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit", "'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", 
\"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6},", "'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True ,", "'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13',", "'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False,", "'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\", "'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS',", "take out the function to_GIS from pyamesh and run it alone #For amesh", "'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5,", "'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A',", "'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just one", "'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 
'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True,", "3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\", "'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250,", "8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN',", "'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1',", "'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000,", "'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20',", "100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250],", "'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just one plot #to_GIS", "'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC',", "'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C',", "'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 
'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37,", "'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A',", "'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\", "'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2,", "'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13,", "'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18',", "'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14,", "'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\", "'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B',", "'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D',", "TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] },", "to_GIS from pyamesh and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the ahuachapan model", "'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A',", "} #'XAH-2R' 
mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300,", "'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A',", "'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65],", "mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500,", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC',", "'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS',", "'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 'AH-28', 'AH-29', 'AH-30', 'AH-31',", "'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27',", ", 'HG':1.000E+02}, } #to_GIS does just one plot #to_GIS and plot_all_GIS it plots", "'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65],", "'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3',", "'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 
'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True,", "'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\", "'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV',", "'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True", "'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN',", "'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], }", "'AH-28', 'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B',", "'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000,", "'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B',", "5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200],", "'AH-31', 
'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36',", "everything #try polygon true #'line_file':'../input/lines.csv', #maybe is better to take out the function", "'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ],", "'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3,", "'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6", "'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000,", "}, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6,", "does just one plot #to_GIS and plot_all_GIS it plots everything #try polygon true", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just one plot #to_GIS and", "'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True,", ", 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11,", "11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37,", "'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9',", "'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 
'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{", "125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400],", "13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1,", "7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]},", "'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\", "'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000, 'Ymin':302000,", "'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 },", "datetime import datetime, timedelta import numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600,", "'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100],", "3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200],", "15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100,", "'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, },", "'CH-1', 'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 
'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A',", "'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, },", "11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production',", "6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R',", "'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\", "'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15,", "'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{", "it plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe is better to take out", "'AH-15', 'AH-16', 'AH-16A', 'AH-17', 'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26',", "'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{", "'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA795':{'SL':'GEN',", "#maybe is better to take out the function to_GIS from pyamesh 
and run", "'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10', 'CH-7',", "'HG':1.000E+02}, } #to_GIS does just one plot #to_GIS and plot_all_GIS it plots everything", "'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11,", "'Ymin':302000, 'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150,", "'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C', 'AH-35D', 'AH-36', 'CH-1', 'CH-10',", "'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "'Ymax':322000, 'x_from_boarder':1000, 'y_from_boarder':1000, 'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv',", "'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400],", "15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS',", "17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100,", "'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 
'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A',", "'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10,", "'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "60], 5:['E',30], 6:['F',65], 7:['G',40], 8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200],", "polygon true #'line_file':'../input/lines.csv', #maybe is better to take out the function to_GIS from", "8:['H',65], 9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test", "16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37,", "'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just one plot #to_GIS and plot_all_GIS it", "'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN',", "'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\", "5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\", "'AH-36', 'CH-1', 
'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B',", "'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37,", "'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C',", "'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\", \"set_inac_from_poly\":False, 'set_inac_from_inner':True,", "pyamesh and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the ahuachapan model has another", "}, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30,", "np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D',", "'CH-10', 'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B',", "'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1,", "13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN',", "9:['I',30], 10:['J',100], 11:['K',50], 12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output", 
"\"set_inac_from_poly\":False, 'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\", "'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15',", "'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R'", "'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0, 'ICP3':1.0,", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85,", "'CH-7', 'CH-7BIS', 'CH-8', 'CH-9', 'CH-9A', 'CH-9B', 'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C',", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA865':{'SL':'SRC', 'NS':84,", "4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV',", "timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4', 'OPROCS':'O4', 'RITMAX':0.04, 'CLOSUR':1E-6, }, 'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20", "as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125],", "'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 
'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does", "18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350,", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC',", "'CH-A'], 'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2',", "'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11,", ", 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11,", "'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV',", "'MAKE_UP_WELLS':[ 'ZAH-37A', 'ZAH-37B', 'ZAH-38A', 'ZAH-38B', 'ZAH-38C', 'ZAH-39A', 'ZAH-39B', 'ZAH-39C', 'XCH-9C', 'XCH-D1', 'XCH-D2', 'XCH-12A',", "numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C',", "'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7',", "'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA897':{'SL':'SRC', 'NS':83, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", "'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000, 'Xmax':424000,", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 
'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6},", "'NS':84, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", "'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False,", "from pyamesh and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the ahuachapan model has", "100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1,", "'AH-29', 'AH-30', 'AH-31', 'AH-32', 'AH-33A', 'AH-33B', 'AH-33C', 'AH-34', 'AH-34A', 'AH-34B', 'AH-35A', 'AH-35B', 'AH-35C',", "'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{ 'MATSLV':5, 'ZPROCS':'Z4',", "'T_DEV':5, 'P_DEV':10, 'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8',", "'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30], 6:['F',65], 7:['G',40],", "'x_space':2000, 'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1,", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87,", 
"'QA763':{'SL':'GEN', 'NS':11, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA839':{'SL':'GEN', 'NS':12, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS',", "'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", "'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02},", "'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12', 'AH-13', 'AH-14', 'AH-15', 'AH-16', 'AH-16A', 'AH-17',", "}, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11', 'AH-12',", "'h_DEV':200, }, 'WELLS':['AH-1', 'AH-2', 'AH-3', 'AH-4', 'AH-4BIS', 'AH-5', 'AH-6', 'AH-7', 'AH-8', 'AH-9', 'AH-11',", "from datetime import datetime, timedelta import numpy as np input_data={'incon_state':'current', 'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0),", "'toler':0.1, 'layer_to_plot':1, 'plot_names':False, 'plot_centers':False, 'plot_layer':False, 'to_steinar':True, 'to_GIS':False, 'plot_all_GIS':False, 'from_leapfrog':False, 'line_file':'', 'fault_distance':50, 'with_polygon':True, 'polygon_shape':\"../input/area/polygon.shp\",", "12:['L',250], 13:['M',200], 14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS':", "'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC', 'NS':82, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "9:'cyan',\\ 10:'magenta',\\ 11:'#faebd7',\\ 12:'#2e8b57',\\ 13:'#eeefff',\\ 14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10,", "'AH-18', 'AH-19', 'AH-20', 'AH-21', 'AH-22', 'AH-23', 'AH-24', 'AH-25', 'AH-26', 'AH-27', 
'AH-28', 'AH-29', 'AH-30',", "plot_all_GIS it plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe is better to take", "'ICP2':0.0, 'ICP3':1.0, }, 'MULTI':{ 'NK':1, 'NEQ':2, 'NPH':2, 'NB':6 }, 'IT2':{ 'T_DEV':5, 'P_DEV':10, 'h_DEV':200,", "'XCH-D1', 'XCH-D2', 'XCH-12A', 'XCH-12B', 'XCH-8A', 'XCH-8B', ], 'NOT_PRODUCING_WELL':['CH-D'], } #'XAH-2R' mesh_setup={'mesh_creation':True , 'Xmin':404000,", "output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000]", "'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA896':{'SL':'SRC', 'NS':85, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86,", "'EOS':1, 'source_txt':'../input/', 'ref_date':datetime(1975,1,1,0,0,0), 'z_ref':600, 'db_path':'../input/model_month.db', 'LAYERS':{1:['A',100], 2:['B', 100], 3:['C', 125], 4:['D', 60], 5:['E',30],", "#to_GIS and plot_all_GIS it plots everything #try polygon true #'line_file':'../input/lines.csv', #maybe is better", "'KDATA':2, 'MCYC':100, 'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1, 'DELTEN_LIST':[10,30,50,1000,10000,10000] }, 'TIMES':{'TIMES_N':np.arange(datetime(1985,7,1), datetime(2015,7,1), timedelta(days=120)).astype(datetime)}, 'SOLVR':{", "'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 ,", "'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA866':{'SL':'SRC',", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06", "'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2, 'MCYC':100, 
'MCYPR':30, 'P':100, 'T':350, 'X':0.1, 'DELTEN':-1,", ", 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11,", "'y_space':2000, 'x_gap_min':411300, 'x_gap_max':418500, 'y_gap_min':304500, 'y_gap_max':311250, 'x_gap_space':250, 'y_gap_space':250, 'radius_criteria':150, 'filename':'../input/well_feedzone_xyz.csv', 'filepath':'', 'toler':0.1, 'layer_to_plot':1, 'plot_names':False,", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA831':{'SL':'SRC', 'NS':86, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC',", ", 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, } #to_GIS does just", "'EX':1.1E6}, 'QA795':{'SL':'GEN', 'NS':15, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA761':{'SL':'GEN', 'NS':16, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'EA833':{'SL':'SRC', 'NS':81,", "function to_GIS from pyamesh and run it alone #For amesh https://askubuntu.com/questions/454253/how-to-run-32-bit-app-in-ubuntu-64-bit #the ahuachapan", "'INCONS_PARAM':{ 'To':30, 'GRADTZ':0.08, 'DEPTH_TO_SURF':100, 'DELTAZ':20 }, 'RPCAP':{ 'IRP':3, 'RP1':0.4, 'RP2':0.03, 'ICP':1, 'ICP1':1.0E6, 'ICP2':0.0,", "'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA762':{'SL':'GEN', 'NS':13, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA796':{'SL':'GEN', 'NS':14, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6},", "14:['N',400], 15:['O',400], 16:['P',200], 17:['Q',200], 18:['R', 100]}, 'TITLE':'Test output TOUGH2', 'TYPE_RUN':'production', 'PARAMETERS': {'NOITE':1, 'KDATA':2,", "'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, 'EA864':{'SL':'SRC', 'NS':87, 'TYPE':'DELV', 'GX':5.000E-11, 'EX':1.500E+06 , 'HG':1.000E+02}, }", "'set_inac_from_inner':True, 'angle':10, 'rotate':True, 'colors':{1:'red',\\ 2:'white',\\ 3:'yellow',\\ 4:'blue',\\ 5:'green',\\ 6:'purple',\\ 7:'#ff69b4',\\ 8:'darkorange',\\ 
9:'cyan',\\ 10:'magenta',\\", "14:'#da70d6',\\ 15:'#ff7f50',\\ 16:'#cd853f',\\ 17:'#bc8f8f',\\ 18:'#5f9ea0',\\ 19:'#daa520'}} geners={'QA797':{'SL':'GEN', 'NS':10, 'TYPE':'MASS', 'GX':37, 'EX':1.1E6}, 'QA763':{'SL':'GEN', 'NS':11," ]
[ "in genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story != []: for fandom in", "story_count.append(count_genre) for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom,", "for story in selection: rating = story['rating'] genres_in_story = story.get('genres') if genres_in_story !=", "in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\": count} story_count.append(count_author)", "[] authors = [] if session.get('is_adult') == True: selection = stories_collection.find() else: selection", "flash, session from flask_pymongo import PyMongo from datetime import date, datetime app =", "= mongo.db.stories users_collection = mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type", "genre fandoms_in_story = story.get('fandoms') if fandoms_in_story != []: for fandom in fandoms_in_story: fandom", "[] if session.get('is_adult') == True: selection = stories_collection.find() else: selection = stories_collection.find( {\"rating\":", "calculate_age(born): today = date.today() bday = datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year", "\"total\": count} story_count.append(count_genre) for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom =", "fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for", "def list_by_type(): list_by_type = {} ratings = [] genres = [] fandoms =", "selection = stories_collection.find() else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story", "admins.\") def calculate_age(born): today = date.today() bday = datetime.strptime(born, '%Y-%m-%d') age = today.year", "story_count def report(item, 
reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\":", "date.today() bday = datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year - ((today.month, today.day)", "list_by_type(): list_by_type = {} ratings = [] genres = [] fandoms = []", "= stories_collection.find() else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in", "list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating", "= list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating})", "fandom, \"total\": count} story_count.append(count_fandom) for author in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author", "this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True)", "os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection =", "story.get('genres') if genres_in_story != []: for genre in genres_in_story: genre fandoms_in_story = story.get('fandoms')", "= {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for author in authors_list: count = stories_collection.count_documents({\"author\":", "= os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection", "if session.get('is_adult') == True: selection = stories_collection.find() else: 
selection = stories_collection.find( {\"rating\": {\"$nin\":", "datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY')", "story.get('fandoms') if fandoms_in_story != []: for fandom in fandoms_in_story: fandom else: fandom =", "fandom = \"Fandom not added\" author = story['author'] if rating not in ratings:", "story_count.append(count_rating) for genre in genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre,", "import os from flask import Flask, app, flash, session from flask_pymongo import PyMongo", "to admins.\") def calculate_age(born): today = date.today() bday = datetime.strptime(born, '%Y-%m-%d') age =", "in selection: rating = story['rating'] genres_in_story = story.get('genres') if genres_in_story != []: for", "\"total\": count} story_count.append(count_author) return story_count def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push':", "!= []: for genre in genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story !=", "fandoms = [] authors = [] if session.get('is_adult') == True: selection = stories_collection.find()", "if fandom not in fandoms: fandoms.append(fandom) if author not in authors: authors.append(author) list_by_type.update({\"ratings\":", "genre in genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\": count}", "ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type def story_count(): story_count =", "= list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating", "for fandom in fandoms_in_story: fandom else: fandom = \"Fandom not added\" author =", 
"PyMongo from datetime import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"]", "authors = [] if session.get('is_adult') == True: selection = stories_collection.find() else: selection =", "list_by_type()[\"authors\"] for rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating,", "= stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating = story['rating']", "genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre) for", "users_collection = mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {}", "fandoms_in_story = story.get('fandoms') if fandoms_in_story != []: for fandom in fandoms_in_story: fandom else:", "Flask, app, flash, session from flask_pymongo import PyMongo from datetime import date, datetime", "\"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {} ratings = [] genres = []", "genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story != []: for fandom in fandoms_in_story:", "in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type", "fandoms: fandoms.append(fandom) if author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\":", "app, flash, session from flask_pymongo import PyMongo from datetime import date, datetime app", "{\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating = story['rating'] genres_in_story =", "session from flask_pymongo import PyMongo from datetime import date, datetime app = Flask(__name__)", "fandom in fandoms_list: count = 
stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\": count}", "in fandoms: fandoms.append(fandom) if author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres,", "reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}},", "list_by_type def story_count(): story_count = [] ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list", "rating = story['rating'] genres_in_story = story.get('genres') if genres_in_story != []: for genre in", "\"total\": count} story_count.append(count_fandom) for author in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author =", "genres: genres.append(genre) if fandom not in fandoms: fandoms.append(fandom) if author not in authors:", "fandom not in fandoms: fandoms.append(fandom) if author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings,", "= story['author'] if rating not in ratings: ratings.append(rating) if genre not in genres:", "return story_count def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item,", "= {} ratings = [] genres = [] fandoms = [] authors =", "fandom}) count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for author in authors_list: count", "{\"rating\": rating, \"total\": count} story_count.append(count_rating) for genre in genres_list: count = stories_collection.count_documents({\"genres\": genre})", "= {\"author\": author, \"total\": count} story_count.append(count_author) return story_count def report(item, reason_given, this_story, reported_by):", "= os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = 
os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection", "stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom in fandoms_list:", "report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\":", "story['author'] if rating not in ratings: ratings.append(rating) if genre not in genres: genres.append(genre)", "count} story_count.append(count_author) return story_count def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\":", "{'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to", "return flash(\"Report sent to admins.\") def calculate_age(born): today = date.today() bday = datetime.strptime(born,", "= story.get('fandoms') if fandoms_in_story != []: for fandom in fandoms_in_story: fandom else: fandom", "fandoms.append(fandom) if author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms,", "app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories", "authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type def", "if rating not in ratings: ratings.append(rating) if genre not in genres: genres.append(genre) if", "True: selection = stories_collection.find() else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for", 
"stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type():", "def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by,", "{\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating = story['rating'] genres_in_story = story.get('genres')", "flash(\"Report sent to admins.\") def calculate_age(born): today = date.today() bday = datetime.strptime(born, '%Y-%m-%d')", "= {\"rating\": rating, \"total\": count} story_count.append(count_rating) for genre in genres_list: count = stories_collection.count_documents({\"genres\":", "in genres: genres.append(genre) if fandom not in fandoms: fandoms.append(fandom) if author not in", "authors_list = list_by_type()[\"authors\"] for rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating =", "datetime import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI')", "story_count.append(count_author) return story_count def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\":", "[\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating = story['rating'] genres_in_story = story.get('genres') if", "\"authors\": authors}) return list_by_type def story_count(): story_count = [] ratings_list = list_by_type()[\"ratings\"] genres_list", "genres.append(genre) if fandom not in fandoms: fandoms.append(fandom) if author not in authors: authors.append(author)", "\"total\": count} story_count.append(count_rating) for genre in genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre =", "def story_count(): story_count = [] 
ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list =", "{\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to admins.\")", "not in genres: genres.append(genre) if fandom not in fandoms: fandoms.append(fandom) if author not", "reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to admins.\") def calculate_age(born): today =", "for genre in genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story != []: for", "in ratings: ratings.append(rating) if genre not in genres: genres.append(genre) if fandom not in", "= Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo =", "list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in ratings_list: count =", "selection: rating = story['rating'] genres_in_story = story.get('genres') if genres_in_story != []: for genre", "today = date.today() bday = datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year -", "flask_pymongo import PyMongo from datetime import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] =", "story['rating'] genres_in_story = story.get('genres') if genres_in_story != []: for genre in genres_in_story: genre", "authors}) return list_by_type def story_count(): story_count = [] ratings_list = list_by_type()[\"ratings\"] genres_list =", "count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for author in authors_list: count =", "added\" author = story['author'] if rating not in ratings: ratings.append(rating) if genre not", "story in selection: rating = story['rating'] genres_in_story = story.get('genres') if genres_in_story != []:", "age = today.year - bday.year - 
((today.month, today.day) < (bday.month, bday.day)) return age", "\"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def", "= {\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\":", "for genre in genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\":", "fandoms_in_story != []: for fandom in fandoms_in_story: fandom else: fandom = \"Fandom not", "import PyMongo from datetime import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME')", "== True: selection = stories_collection.find() else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}})", "[]: for fandom in fandoms_in_story: fandom else: fandom = \"Fandom not added\" author", "= datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year - ((today.month, today.day) < (bday.month,", "in genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre)", "= date.today() bday = datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year - ((today.month,", "mongo.db.stories users_collection = mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type =", "if genre not in genres: genres.append(genre) if fandom not in fandoms: fandoms.append(fandom) if", "= stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\": count} story_count.append(count_rating) for genre in", "this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent", "def calculate_age(born): today = date.today() bday 
= datetime.strptime(born, '%Y-%m-%d') age = today.year -", "app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users", "{} ratings = [] genres = [] fandoms = [] authors = []", "else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating", "not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return", "upsert=True) return flash(\"Report sent to admins.\") def calculate_age(born): today = date.today() bday =", "ratings.append(rating) if genre not in genres: genres.append(genre) if fandom not in fandoms: fandoms.append(fandom)", "app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\"", "return list_by_type def story_count(): story_count = [] ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"]", "author = story['author'] if rating not in ratings: ratings.append(rating) if genre not in", "os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection =", "functions\"\"\" def list_by_type(): list_by_type = {} ratings = [] genres = [] fandoms", "= None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {} ratings = [] genres", "list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type def story_count(): story_count", "\"fandoms\": fandoms, \"authors\": authors}) return list_by_type def story_count(): story_count = [] ratings_list =", "count} story_count.append(count_rating) for genre in genres_list: count = 
stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\":", "os from flask import Flask, app, flash, session from flask_pymongo import PyMongo from", "rating, \"total\": count} story_count.append(count_rating) for genre in genres_list: count = stories_collection.count_documents({\"genres\": genre}) count_genre", "in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom)", "datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year - ((today.month, today.day) < (bday.month, bday.day))", "session.get('is_adult') == True: selection = stories_collection.find() else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\",", "count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\": count} story_count.append(count_author) return story_count", "from flask_pymongo import PyMongo from datetime import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"]", "[] ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"]", "authors_list: count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\": count} story_count.append(count_author) return", "ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for", "fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in ratings_list: count = stories_collection.count_documents({\"rating\":", "= list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in ratings_list: count", "ratings_list: count = 
stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\": count} story_count.append(count_rating) for", "rating}) count_rating = {\"rating\": rating, \"total\": count} story_count.append(count_rating) for genre in genres_list: count", "os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection =", "None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {} ratings = [] genres =", "stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating = story['rating'] genres_in_story", "!= []: for fandom in fandoms_in_story: fandom else: fandom = \"Fandom not added\"", "story_count.append(count_fandom) for author in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author,", "count_author = {\"author\": author, \"total\": count} story_count.append(count_author) return story_count def report(item, reason_given, this_story,", "rating not in ratings: ratings.append(rating) if genre not in genres: genres.append(genre) if fandom", "sent to admins.\") def calculate_age(born): today = date.today() bday = datetime.strptime(born, '%Y-%m-%d') age", "rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\": count}", "bday = datetime.strptime(born, '%Y-%m-%d') age = today.year - bday.year - ((today.month, today.day) <", "PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\"", "[] genres = [] fandoms = [] authors = [] if session.get('is_adult') ==", "count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for 
author", "{\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for author in authors_list: count = stories_collection.count_documents({\"author\": author})", "genres_in_story = story.get('genres') if genres_in_story != []: for genre in genres_in_story: genre fandoms_in_story", "[]: for genre in genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story != []:", "= [] ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list =", "{\"author\": author, \"total\": count} story_count.append(count_author) return story_count def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\":", "list_by_type = {} ratings = [] genres = [] fandoms = [] authors", "= [] genres = [] fandoms = [] authors = [] if session.get('is_adult')", "= [] authors = [] if session.get('is_adult') == True: selection = stories_collection.find() else:", "selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection: rating =", "from datetime import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] =", "\"Adult/NSFW\"]}}) for story in selection: rating = story['rating'] genres_in_story = story.get('genres') if genres_in_story", "genre}) count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom in fandoms_list: count", "'%Y-%m-%d') age = today.year - bday.year - ((today.month, today.day) < (bday.month, bday.day)) return", "story_count = [] ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list", "count = stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom", "{\"item_reported\": item, 
\"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to admins.\") def", "count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom in fandoms_list: count =", "for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\":", "not in ratings: ratings.append(rating) if genre not in genres: genres.append(genre) if fandom not", "= stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for author in", "count_rating = {\"rating\": rating, \"total\": count} story_count.append(count_rating) for genre in genres_list: count =", "item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to admins.\") def calculate_age(born):", "else: fandom = \"Fandom not added\" author = story['author'] if rating not in", "flask import Flask, app, flash, session from flask_pymongo import PyMongo from datetime import", "import Flask, app, flash, session from flask_pymongo import PyMongo from datetime import date,", "if fandoms_in_story != []: for fandom in fandoms_in_story: fandom else: fandom = \"Fandom", "mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection = None", "= story['rating'] genres_in_story = story.get('genres') if genres_in_story != []: for genre in genres_in_story:", "= list_by_type()[\"authors\"] for rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\":", "author, \"total\": count} story_count.append(count_author) return story_count def report(item, reason_given, this_story, reported_by): stories_collection.find_one_and_update({\"url\": this_story},", "fandom in fandoms_in_story: fandom else: fandom = \"Fandom not added\" author = 
story['author']", "mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {} ratings =", "= stories_collection.count_documents({\"genres\": genre}) count_genre = {\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom in", "genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in ratings_list:", "= mongo.db.users fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {} ratings", "for author in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\":", "= os.getenv('SECRET_KEY') mongo = PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection", "for rating in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\":", "genres_in_story != []: for genre in genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story", "in fandoms_in_story: fandom else: fandom = \"Fandom not added\" author = story['author'] if", "date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] =", "Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo = PyMongo(app)", "author in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\": count}", "= [] if session.get('is_adult') == True: selection = stories_collection.find() else: selection = stories_collection.find(", "stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\": count} 
story_count.append(count_rating) for genre in genres_list:", "\"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to admins.\") def calculate_age(born): today = date.today()", "if author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\":", "genre in genres_in_story: genre fandoms_in_story = story.get('fandoms') if fandoms_in_story != []: for fandom", "app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"] = os.getenv('SECRET_KEY') mongo", "import date, datetime app = Flask(__name__) app.config[\"MONGO_DBNAME\"] = os.getenv('MONGO_DBNAME') app.config[\"MONGO_URI\"] = os.getenv('MONGO_URI') app.config[\"SECRET_KEY\"]", "if genres_in_story != []: for genre in genres_in_story: genre fandoms_in_story = story.get('fandoms') if", "ratings: ratings.append(rating) if genre not in genres: genres.append(genre) if fandom not in fandoms:", "stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\": fandom, \"total\": count} story_count.append(count_fandom) for author in authors_list:", "= [] fandoms = [] authors = [] if session.get('is_adult') == True: selection", "author}) count_author = {\"author\": author, \"total\": count} story_count.append(count_author) return story_count def report(item, reason_given,", "[] fandoms = [] authors = [] if session.get('is_adult') == True: selection =", "= \"Fandom not added\" author = story['author'] if rating not in ratings: ratings.append(rating)", "genre, \"total\": count} story_count.append(count_genre) for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom", "{\"genre\": genre, \"total\": count} story_count.append(count_genre) for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom})", "ratings = [] genres = [] 
fandoms = [] authors = [] if", "= PyMongo(app) \"\"\"Collections\"\"\" stories_collection = mongo.db.stories users_collection = mongo.db.users fake_collection = None \"\"\"Helper", "in ratings_list: count = stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\": count} story_count.append(count_rating)", "story_count(): story_count = [] ratings_list = list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"]", "genres = [] fandoms = [] authors = [] if session.get('is_adult') == True:", "= story.get('genres') if genres_in_story != []: for genre in genres_in_story: genre fandoms_in_story =", "authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type def story_count():", "count = stories_collection.count_documents({\"rating\": rating}) count_rating = {\"rating\": rating, \"total\": count} story_count.append(count_rating) for genre", "fandoms_in_story: fandom else: fandom = \"Fandom not added\" author = story['author'] if rating", "stories_collection.find() else: selection = stories_collection.find( {\"rating\": {\"$nin\": [\"R/Adult/NSFW\", \"Adult/NSFW\"]}}) for story in selection:", "genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type def story_count(): story_count = [] ratings_list", "fandom else: fandom = \"Fandom not added\" author = story['author'] if rating not", "genre not in genres: genres.append(genre) if fandom not in fandoms: fandoms.append(fandom) if author", "not added\" author = story['author'] if rating not in ratings: ratings.append(rating) if genre", "list_by_type()[\"ratings\"] genres_list = list_by_type()[\"genres\"] fandoms_list = list_by_type()[\"fandoms\"] authors_list = list_by_type()[\"authors\"] for rating in", "stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\": count} 
story_count.append(count_author) return story_count def report(item,", "\"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report sent to admins.\") def calculate_age(born): today", "count} story_count.append(count_genre) for fandom in fandoms_list: count = stories_collection.count_documents({\"fandoms\": fandom}) count_fandom = {\"fandom\":", "reported_by): stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return", "fandoms, \"authors\": authors}) return list_by_type def story_count(): story_count = [] ratings_list = list_by_type()[\"ratings\"]", "from flask import Flask, app, flash, session from flask_pymongo import PyMongo from datetime", "reason_given}}}, upsert=True) return flash(\"Report sent to admins.\") def calculate_age(born): today = date.today() bday", "author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\": genres, \"fandoms\": fandoms, \"authors\": authors})", "\"genres\": genres, \"fandoms\": fandoms, \"authors\": authors}) return list_by_type def story_count(): story_count = []", "fake_collection = None \"\"\"Helper functions\"\"\" def list_by_type(): list_by_type = {} ratings = []", "not in fandoms: fandoms.append(fandom) if author not in authors: authors.append(author) list_by_type.update({\"ratings\": ratings, \"genres\":", "stories_collection.find_one_and_update({\"url\": this_story}, {'$push': {\"reports\": {\"item_reported\": item, \"reported_by\": reported_by, \"reason_given\": reason_given}}}, upsert=True) return flash(\"Report", "count} story_count.append(count_fandom) for author in authors_list: count = stories_collection.count_documents({\"author\": author}) count_author = {\"author\":", "= stories_collection.count_documents({\"author\": author}) count_author = {\"author\": author, \"total\": count} 
story_count.append(count_author) return story_count def", "\"Fandom not added\" author = story['author'] if rating not in ratings: ratings.append(rating) if" ]
[ "(self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok =", "self.execution_uuid = uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition,", "topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self): get_mock = AsyncMock() self.broker.get_one", "topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock", "def setUp(self) -> None: super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker definition =", "self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"),", "= uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}),", "from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, )", "topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def", "ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, 
TransactionCommitter, ) from tests.utils import (", "= get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid,", "), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid,", "from tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp()", "minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, ) from", "import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp() self.execution_uuid =", "), ), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [", "True self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual(", "] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid,", "topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid,", "async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock", "AsyncMock() self.broker_publisher.send = send_mock await 
self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid,", "self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [", "self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"),", "test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock = AsyncMock()", "def test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send =", "LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils import ( MinosTestCase,", "( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils import", ") from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter,", "LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils import ( MinosTestCase, )", "definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ],", "get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\",", "self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), 
LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True),", "\"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock =", "send_mock.call_args_list, ) async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one =", "topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self): get_mock =", "super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [", "], send_mock.call_args_list, ) async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one", "= TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid,", "call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ],", "reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, )", 
"= send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid,", "= AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"),", "( uuid4, ) from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext,", "(self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock()", "False self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await", "uuid4, ) from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution,", "RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps)", "], ), ), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected =", "definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"},", "topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), 
call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list,", "import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils", "import ( uuid4, ) from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution, Saga,", "send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid,", "def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock =", "reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"),", "expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async", "= AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\",", "call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self): get_mock", 
"topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok", ") from uuid import ( uuid4, ) from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep,", "LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}),", ") async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock", "topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) if __name__ == \"__main__\": unittest.main()", "[ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) if __name__ ==", "TransactionCommitter, ) from tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) ->", "None: super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps =", "noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution(", "{\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ),", "# noinspection PyTypeChecker definition = 
LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}),", "= True self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit()", "(self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock", "= send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ],", "AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual(", "topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def", "self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"),", "tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp() self.execution_uuid", "self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), 
call(data=self.execution_uuid, topic=\"RejectBarTransaction\"),", "self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) if", "[ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid,", "ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}),", "= LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution(", "call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ],", "self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid,", "= [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), 
ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid,", "LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition],", "call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self): get_mock", "RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[", "Saga, SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase):", "get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send", "SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] self.committer =", "AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\",", "inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ),", "send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( 
[ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"),", "self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock", "ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"),", "{\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition,", "] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one", "SagaExecution, TransactionCommitter, ) from tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self)", "[ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid,", "def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected,", "TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker definition", "self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, 
topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) if __name__", "= [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def", "steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid,", "get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"),", "reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self):", "send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"),", "send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list,", ") from tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None:", "MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp() self.execution_uuid = uuid4() #", "= False self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError):", "self.execution_uuid, SagaContext(), steps=[ 
RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] self.committer", "PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition,", "call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"),", "AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock", "get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock with", "import ( AsyncMock, call, ) from uuid import ( uuid4, ) from minos.saga", "send_mock.call_args_list, ) async def test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock send_mock =", "with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid,", "call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self): get_mock = AsyncMock() self.broker.get_one =", "test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock", "await self.committer.commit() self.assertEqual( [ 
call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"),", "[ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition, {\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(),", "= get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid,", "call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async", "{\"bar\"}), ConditionalSagaStepExecution( definition, {\"bar\"}, inner=SagaExecution( Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition,", "call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) if __name__ == \"__main__\":", "def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock =", "= send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\",", "class TestTransactionCommitter(MinosTestCase): 
def setUp(self) -> None: super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker", "call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"),", "setUp(self) -> None: super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep)", "\"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok", "get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send", "[ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self):", "call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self): get_mock = AsyncMock()", "await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, )", "get_mock = AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await", "send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), 
call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\",", "], send_mock.call_args_list, ) async def test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock send_mock", "from uuid import ( uuid4, ) from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution,", ") class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp() self.execution_uuid = uuid4() # noinspection", "= AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send =", "self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual( [ call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"),", "unittest.mock import ( AsyncMock, call, ) from uuid import ( uuid4, ) from", "async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock", "topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid,", "reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, )", "AsyncMock() 
self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"),", "self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\",", "reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self):", "uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps = [ RemoteSagaStepExecution(definition, {\"foo\"}), LocalSagaStepExecution(definition,", "SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils import ( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def", "\"foobar\"), ] self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True", "reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"),", "from unittest.mock import ( AsyncMock, call, ) from uuid import ( uuid4, )", "test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), 
(self.execution_uuid, \"foobar\"), ] self.assertEqual(expected, self.committer.transactions)", "RemoteSagaStepExecution, Saga, SagaContext, SagaExecution, TransactionCommitter, ) from tests.utils import ( MinosTestCase, ) class", "committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ]", "topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok = False", "topic=\"RejectBarTransaction\"), call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self): get_mock =", "import unittest from unittest.mock import ( AsyncMock, call, ) from uuid import (", "unittest from unittest.mock import ( AsyncMock, call, ) from uuid import ( uuid4,", "( AsyncMock, call, ) from uuid import ( uuid4, ) from minos.saga import", "self.assertEqual(expected, self.committer.transactions) async def test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one =", "TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"),", "{\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self): expected", "self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", 
reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid,", "( MinosTestCase, ) class TestTransactionCommitter(MinosTestCase): def setUp(self) -> None: super().setUp() self.execution_uuid = uuid4()", "call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_commit_false(self): get_mock = AsyncMock() get_mock.return_value.data.ok =", ") async def test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock()", "AsyncMock, call, ) from uuid import ( uuid4, ) from minos.saga import (", "-> None: super().setUp() self.execution_uuid = uuid4() # noinspection PyTypeChecker definition = LocalSagaStep(on_execute=LocalSagaStep) self.executed_steps", "= AsyncMock() get_mock.return_value.data.ok = False self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send =", "get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await", "= AsyncMock() self.broker_publisher.send = send_mock await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid, topic=\"ReserveBarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid,", "call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list, ) async", "RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def test_transactions(self):", "uuid import ( uuid4, ) from minos.saga import ( ConditionalSagaStepExecution, LocalSagaStep, LocalSagaStepExecution, RemoteSagaStepExecution,", "self.broker.get_one = get_mock send_mock = 
AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject() self.assertEqual( [", "AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock", "call(data=self.execution_uuid, topic=\"RejectFooTransaction\"), call(data=self.execution_uuid, topic=\"RejectFoobarTransaction\"), ], send_mock.call_args_list, ) async def test_reject(self): get_mock = AsyncMock()", "async def test_reject(self): get_mock = AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send", "get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [", "self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit()", "= AsyncMock() self.broker.get_one = get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock await self.committer.reject()", "call, ) from uuid import ( uuid4, ) from minos.saga import ( ConditionalSagaStepExecution,", "= get_mock send_mock = AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual(", "self.executed_steps) def test_transactions(self): expected = [ (self.execution_uuid, \"bar\"), (self.execution_uuid, \"foo\"), (self.execution_uuid, \"foobar\"), ]", "topic=\"ReserveFooTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"ReserveFoobarTransaction\", reply_topic=\"TheReplyTopic\"), call(data=self.execution_uuid, topic=\"CommitBarTransaction\"), call(data=self.execution_uuid, topic=\"CommitFooTransaction\"), call(data=self.execution_uuid, topic=\"CommitFoobarTransaction\"), ], send_mock.call_args_list,", "{\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition), ] 
self.committer = TransactionCommitter(self.execution_uuid, self.executed_steps) def", "Saga(steps=[definition], committed=True), self.execution_uuid, SagaContext(), steps=[ RemoteSagaStepExecution(definition, {\"foo\"}), RemoteSagaStepExecution(definition, {\"foobar\"}), ], ), ), ConditionalSagaStepExecution(definition),", "send_mock = AsyncMock() self.broker_publisher.send = send_mock with self.assertRaises(ValueError): await self.committer.commit() self.assertEqual( [ call(data=self.execution_uuid,", "test_commit_true(self): get_mock = AsyncMock() get_mock.return_value.data.ok = True self.broker.get_one = get_mock send_mock = AsyncMock()" ]
[]
[ "= pymk.SQLiteDB(args.db) with db.session() as session: ns = session.get_namespace() with args.input: import_file(session, ns,", "time def import_file(session, ns, f, batch_size=1000): links = [] i = 0 start", "in %.2fs (total: %d links)' % ( batch_size, (time.perf_counter() - start), i ),", "links = [] i = 0 start = time.perf_counter() for link in pymk.tokenize(f,", "<gh_stars>1-10 #!/usr/bin/python3 import argparse import pymk import sys import time def import_file(session, ns,", "write to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size to use for", "\"-\" to read from STDIN' ) args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with", "to use for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file,", "start), i ), end='') start = time.perf_counter() if len(links) > 0: session.create_links(links) def", "i += 1 if len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links", "text files into a MarkovBot database' ) parser.add_argument( '-d', '--db', required=True, type=str, help='Filename", "argparse import pymk import sys import time def import_file(session, ns, f, batch_size=1000): links", "import_file(session, ns, f, batch_size=1000): links = [] i = 0 start = time.perf_counter()", "pymk.SQLiteDB(args.db) with db.session() as session: ns = session.get_namespace() with args.input: import_file(session, ns, args.input,", "batch_size, (time.perf_counter() - start), i ), end='') start = time.perf_counter() if len(links) >", "link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if len(links) > batch_size: session.create_links(links[:batch_size])", "pymk import sys import time def import_file(session, ns, f, batch_size=1000): links = []", "ns, f, batch_size=1000): links = [] i = 0 start = time.perf_counter() for", "help='Filename for the SQLite3 database to 
write to' ) parser.add_argument( '-b', '--batch-size', default=1000,", "db.session() as session: ns = session.get_namespace() with args.input: import_file(session, ns, args.input, args.batch_size) if", "import argparse import pymk import sys import time def import_file(session, ns, f, batch_size=1000):", "required=True, type=str, help='Filename for the SQLite3 database to write to' ) parser.add_argument( '-b',", "from STDIN' ) args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as session:", "errors='replace'), help='Input text file, or \"-\" to read from STDIN' ) args =", "( batch_size, (time.perf_counter() - start), i ), end='') start = time.perf_counter() if len(links)", "start = time.perf_counter() if len(links) > 0: session.create_links(links) def main(): parser = argparse.ArgumentParser(", "print('\\r%d links imported in %.2fs (total: %d links)' % ( batch_size, (time.perf_counter() -", "STDIN' ) args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as session: ns", "= 0 start = time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i +=", "def main(): parser = argparse.ArgumentParser( description='Import text files into a MarkovBot database' )", "for the SQLite3 database to write to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int,", "time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if len(links) >", "parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as session: ns = session.get_namespace() with args.input:", "= time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if len(links)", "'-d', '--db', required=True, type=str, help='Filename for the SQLite3 database to write to' )", "= session.get_namespace() with args.input: import_file(session, ns, args.input, args.batch_size) if __name__ == '__main__': 
main()", "'--db', required=True, type=str, help='Filename for the SQLite3 database to write to' ) parser.add_argument(", "args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as session: ns = session.get_namespace()", "imported in %.2fs (total: %d links)' % ( batch_size, (time.perf_counter() - start), i", "to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size to use for inserts'", "%.2fs (total: %d links)' % ( batch_size, (time.perf_counter() - start), i ), end='')", "i ), end='') start = time.perf_counter() if len(links) > 0: session.create_links(links) def main():", "database to write to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size to", "+= 1 if len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links imported", "= [] i = 0 start = time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length):", "main(): parser = argparse.ArgumentParser( description='Import text files into a MarkovBot database' ) parser.add_argument(", "parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size to use for inserts' ) parser.add_argument(", "inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or \"-\" to", "time.perf_counter() if len(links) > 0: session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import text", "read from STDIN' ) args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as", "pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if len(links) > batch_size: session.create_links(links[:batch_size]) links =", "type=str, help='Filename for the SQLite3 database to write to' ) parser.add_argument( '-b', '--batch-size',", "with db.session() as session: ns = session.get_namespace() with args.input: 
import_file(session, ns, args.input, args.batch_size)", "import sys import time def import_file(session, ns, f, batch_size=1000): links = [] i", "batch_size=1000): links = [] i = 0 start = time.perf_counter() for link in", "type=int, help='Batch size to use for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'),", "help='Input text file, or \"-\" to read from STDIN' ) args = parser.parse_args(sys.argv[1:])", ") args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as session: ns =", "= time.perf_counter() if len(links) > 0: session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import", "in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if len(links) > batch_size: session.create_links(links[:batch_size]) links", "'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or \"-\" to read from STDIN'", "description='Import text files into a MarkovBot database' ) parser.add_argument( '-d', '--db', required=True, type=str,", "'-b', '--batch-size', default=1000, type=int, help='Batch size to use for inserts' ) parser.add_argument( 'input',", "start = time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if", "> batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links imported in %.2fs (total: %d", "as session: ns = session.get_namespace() with args.input: import_file(session, ns, args.input, args.batch_size) if __name__", "to write to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size to use", "(time.perf_counter() - start), i ), end='') start = time.perf_counter() if len(links) > 0:", "default=1000, type=int, help='Batch size to use for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8',", "text file, or \"-\" to read from STDIN' ) 
args = parser.parse_args(sys.argv[1:]) db", "session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links imported in %.2fs (total: %d links)' %", "= argparse.ArgumentParser( description='Import text files into a MarkovBot database' ) parser.add_argument( '-d', '--db',", "f, batch_size=1000): links = [] i = 0 start = time.perf_counter() for link", "% ( batch_size, (time.perf_counter() - start), i ), end='') start = time.perf_counter() if", "to read from STDIN' ) args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session()", "size to use for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text", "links)' % ( batch_size, (time.perf_counter() - start), i ), end='') start = time.perf_counter()", "links.append(link) i += 1 if len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d", "[] i = 0 start = time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link)", "%d links)' % ( batch_size, (time.perf_counter() - start), i ), end='') start =", "the SQLite3 database to write to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch", "> 0: session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import text files into a", "import time def import_file(session, ns, f, batch_size=1000): links = [] i = 0", "end='') start = time.perf_counter() if len(links) > 0: session.create_links(links) def main(): parser =", "argparse.ArgumentParser( description='Import text files into a MarkovBot database' ) parser.add_argument( '-d', '--db', required=True,", "session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import text files into a MarkovBot database'", ") parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size to use for inserts' )", "help='Batch size to use for 
inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input", "), end='') start = time.perf_counter() if len(links) > 0: session.create_links(links) def main(): parser", "import pymk import sys import time def import_file(session, ns, f, batch_size=1000): links =", "links = links[batch_size:] print('\\r%d links imported in %.2fs (total: %d links)' % (", "database' ) parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for the SQLite3 database to", "parser = argparse.ArgumentParser( description='Import text files into a MarkovBot database' ) parser.add_argument( '-d',", "parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or \"-\" to read from", "for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or \"-\"", "link_length=ns.link_length): links.append(link) i += 1 if len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:]", "= parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db) with db.session() as session: ns = session.get_namespace() with", "(total: %d links)' % ( batch_size, (time.perf_counter() - start), i ), end='') start", "1 if len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links imported in", "#!/usr/bin/python3 import argparse import pymk import sys import time def import_file(session, ns, f,", "ns = session.get_namespace() with args.input: import_file(session, ns, args.input, args.batch_size) if __name__ == '__main__':", "files into a MarkovBot database' ) parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for", "sys import time def import_file(session, ns, f, batch_size=1000): links = [] i =", "for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1 if len(links) > batch_size:", 
"def import_file(session, ns, f, batch_size=1000): links = [] i = 0 start =", "db = pymk.SQLiteDB(args.db) with db.session() as session: ns = session.get_namespace() with args.input: import_file(session,", "links[batch_size:] print('\\r%d links imported in %.2fs (total: %d links)' % ( batch_size, (time.perf_counter()", "'--batch-size', default=1000, type=int, help='Batch size to use for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r',", "use for inserts' ) parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or", ") parser.add_argument( 'input', type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or \"-\" to read", "len(links) > 0: session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import text files into", "into a MarkovBot database' ) parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for the", "type=argparse.FileType('r', encoding='utf-8', errors='replace'), help='Input text file, or \"-\" to read from STDIN' )", "session: ns = session.get_namespace() with args.input: import_file(session, ns, args.input, args.batch_size) if __name__ ==", "batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links imported in %.2fs (total: %d links)'", "encoding='utf-8', errors='replace'), help='Input text file, or \"-\" to read from STDIN' ) args", "0: session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import text files into a MarkovBot", "file, or \"-\" to read from STDIN' ) args = parser.parse_args(sys.argv[1:]) db =", ") parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for the SQLite3 database to write", "- start), i ), end='') start = time.perf_counter() if len(links) > 0: session.create_links(links)", "len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] 
print('\\r%d links imported in %.2fs (total:", "SQLite3 database to write to' ) parser.add_argument( '-b', '--batch-size', default=1000, type=int, help='Batch size", "if len(links) > batch_size: session.create_links(links[:batch_size]) links = links[batch_size:] print('\\r%d links imported in %.2fs", "or \"-\" to read from STDIN' ) args = parser.parse_args(sys.argv[1:]) db = pymk.SQLiteDB(args.db)", "i = 0 start = time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i", "if len(links) > 0: session.create_links(links) def main(): parser = argparse.ArgumentParser( description='Import text files", "parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for the SQLite3 database to write to'", "links imported in %.2fs (total: %d links)' % ( batch_size, (time.perf_counter() - start),", "0 start = time.perf_counter() for link in pymk.tokenize(f, link_length=ns.link_length): links.append(link) i += 1", "a MarkovBot database' ) parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for the SQLite3", "= links[batch_size:] print('\\r%d links imported in %.2fs (total: %d links)' % ( batch_size,", "MarkovBot database' ) parser.add_argument( '-d', '--db', required=True, type=str, help='Filename for the SQLite3 database" ]
[ "# class BaseMee6PyAPIError(Exception): ''' Base error for any exceptions caused by this api", "pass class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401", "Base error for any exceptions caused by communication with remote server ''' pass", "400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): '''", "any exceptions caused by communication with remote server ''' pass class BadRequestError(HTTPRequestError): '''", "class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401 '''", "''' pass class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error", "class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429 '''", "for any exceptions caused by communication with remote server ''' pass class BadRequestError(HTTPRequestError):", "''' pass # # Local api # class BaseMee6PyAPIError(Exception): ''' Base error for", "class HTTPRequestError(Exception): ''' Base error for any exceptions caused by communication with remote", "''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error", "# class HTTPRequestError(Exception): ''' Base error for any exceptions caused by communication with", "with remote server ''' pass class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class", "''' Error 429 ''' pass # # Local api # class BaseMee6PyAPIError(Exception): '''", "# # HTTP # class HTTPRequestError(Exception): ''' Base error for any exceptions caused", "# # Local api # class BaseMee6PyAPIError(Exception): ''' Base error for any exceptions", "pass # # Local api # class BaseMee6PyAPIError(Exception): ''' Base error for any", "pass class TooManyRequestsError(HTTPRequestError): ''' 
Error 429 ''' pass # # Local api #", "# Local api # class BaseMee6PyAPIError(Exception): ''' Base error for any exceptions caused", "TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass # # Local api # class BaseMee6PyAPIError(Exception):", "Error 400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError):", "''' Base error for any exceptions caused by communication with remote server '''", "Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass # #", "class BaseMee6PyAPIError(Exception): ''' Base error for any exceptions caused by this api '''", "# HTTP # class HTTPRequestError(Exception): ''' Base error for any exceptions caused by", "Error 429 ''' pass # # Local api # class BaseMee6PyAPIError(Exception): ''' Base", "communication with remote server ''' pass class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass", "''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass # # Local api", "UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass", "''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass #", "pass class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429", "''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass class", "api # class BaseMee6PyAPIError(Exception): ''' Base error for any exceptions caused by this", "BaseMee6PyAPIError(Exception): ''' Base error for any exceptions caused by this api ''' pass", "429 ''' pass # # Local api # class BaseMee6PyAPIError(Exception): ''' Base error", "exceptions caused by communication with remote server ''' pass class BadRequestError(HTTPRequestError): ''' Error", "by communication with remote server ''' pass class BadRequestError(HTTPRequestError): ''' Error 400 '''", 
"HTTPRequestError(Exception): ''' Base error for any exceptions caused by communication with remote server", "error for any exceptions caused by communication with remote server ''' pass class", "Local api # class BaseMee6PyAPIError(Exception): ''' Base error for any exceptions caused by", "BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError): ''' Error 401 ''' pass", "class TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass # # Local api # class", "HTTP # class HTTPRequestError(Exception): ''' Base error for any exceptions caused by communication", "401 ''' pass class TooManyRequestsError(HTTPRequestError): ''' Error 429 ''' pass # # Local", "caused by communication with remote server ''' pass class BadRequestError(HTTPRequestError): ''' Error 400", "server ''' pass class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError): '''", "remote server ''' pass class BadRequestError(HTTPRequestError): ''' Error 400 ''' pass class UnauthorizedError(HTTPRequestError):" ]
[ ":rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The", "else self.obj_cls(item)) self.data = items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) #", "= items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests", "= R(list, type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def", "key=None, reverse=False): \"\"\" :param obj_cls: DataObject class reference to wrap each object in", "list of DataObject's. \"\"\" _restriction = R(list, type(None)) @property def schema_value(self): \"\"\" :rtype:", "do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when you need a restriction", "when you need a restriction for a list of DataObject's. \"\"\" _restriction =", "if self.data is not None: items = [] for item in self.data: items.append(item", "obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject class reference to wrap each", "\"\"\" Sort the data list after ManagedList does its work. 
\"\"\" super(OrderedManagedList, self).manage()", "TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param", "2020-06-28 \"\"\" from do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import", "items = [] for item in self.data: items.append(item if type(item) == self.obj_cls else", "self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the data list", ":date_created: 2020-06-28 \"\"\" from do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions", "bool \"\"\" self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self):", "key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the data", ":type obj_cls: DataObject :param nullable: Valid values are a list of Do's or", "against. :type obj_cls: DataObject :param nullable: Valid values are a list of Do's", "item in self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data = items", "in list. 
:type nullable: bool :type key: function :type reverse: bool \"\"\" self.key", "items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class", "\"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable def manage(self): if self.data", "self.nullable = nullable def manage(self): if self.data is not None: items = []", "Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls:", "not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self,", "self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls,", "== self.obj_cls else self.obj_cls(item)) self.data = items else: if not self.nullable: raise RestrictionError.bad_data(self.data,", "nullable: bool :type key: function :type reverse: bool \"\"\" self.key = key self.reverse", "DataObject class reference to wrap each object in list. :type nullable: bool :type", "bool :type key: function :type reverse: bool \"\"\" self.key = key self.reverse =", "need a restriction for a list of DataObject's. 
\"\"\" _restriction = R(list, type(None))", "self.data is not None: items = [] for item in self.data: items.append(item if", "def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\"", "def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject class reference to", "function :type reverse: bool \"\"\" self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls,", "@property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False):", "not None: items = [] for item in self.data: items.append(item if type(item) ==", "is not None: items = [] for item in self.data: items.append(item if type(item)", "you need a restriction for a list of DataObject's. \"\"\" _restriction = R(list,", "type(item) == self.obj_cls else self.obj_cls(item)) self.data = items else: if not self.nullable: raise", "DataObject's. \"\"\" _restriction = R(list, type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\"", "self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data = items else: if", "do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when", "type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls,", "The DO to check each value in the list against. :type obj_cls: DataObject", "__init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject class reference to wrap", ":param obj_cls: DataObject class reference to wrap each object in list. 
:type nullable:", ":type key: function :type reverse: bool \"\"\" self.key = key self.reverse = reverse", "schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param", "else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList):", "\"\"\" :param obj_cls: The DO to check each value in the list against.", "DO to check each value in the list against. :type obj_cls: DataObject :param", "key: function :type reverse: bool \"\"\" self.key = key self.reverse = reverse super(OrderedManagedList,", "\"\"\" :param obj_cls: DataObject class reference to wrap each object in list. :type", "items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data = items else: if not", "def manage(self): \"\"\" Sort the data list after ManagedList does its work. \"\"\"", "Valid values are a list of Do's or a NoneType. :type nullable: bool", "RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when you need a restriction for a", "\"\"\" from do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError", "R(list, type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self,", "if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def", "of Do's or a NoneType. 
:type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls =", "reverse: bool \"\"\" self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def", "obj_cls self.nullable = nullable def manage(self): if self.data is not None: items =", "raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False,", "for a list of DataObject's. \"\"\" _restriction = R(list, type(None)) @property def schema_value(self):", "restriction for a list of DataObject's. \"\"\" _restriction = R(list, type(None)) @property def", "in the list against. :type obj_cls: DataObject :param nullable: Valid values are a", "self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False):", "nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject class reference to wrap each object", "# TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\"", "this when you need a restriction for a list of DataObject's. \"\"\" _restriction", "bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable def manage(self): if", "\"\"\" _restriction = R(list, type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return", "reference to wrap each object in list. :type nullable: bool :type key: function", "or a NoneType. :type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable", "manage(self): if self.data is not None: items = [] for item in self.data:", "\"\"\" Use this when you need a restriction for a list of DataObject's.", "check each value in the list against. :type obj_cls: DataObject :param nullable: Valid", "list against. 
:type obj_cls: DataObject :param nullable: Valid values are a list of", "OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject class reference", "_restriction = R(list, type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema]", "None: items = [] for item in self.data: items.append(item if type(item) == self.obj_cls", "import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when you", "ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when you need", "class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject class", "the list against. :type obj_cls: DataObject :param nullable: Valid values are a list", "to wrap each object in list. :type nullable: bool :type key: function :type", "import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\"", "= obj_cls self.nullable = nullable def manage(self): if self.data is not None: items", "self.obj_cls else self.obj_cls(item)) self.data = items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed)", "nullable: Valid values are a list of Do's or a NoneType. :type nullable:", "list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The DO", "self.obj_cls = obj_cls self.nullable = nullable def manage(self): if self.data is not None:", "list. :type nullable: bool :type key: function :type reverse: bool \"\"\" self.key =", "obj_cls: DataObject class reference to wrap each object in list. :type nullable: bool", "a list of DataObject's. 
\"\"\" _restriction = R(list, type(None)) @property def schema_value(self): \"\"\"", "for item in self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data =", "Do's or a NoneType. :type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls", "= nullable def manage(self): if self.data is not None: items = [] for", "of DataObject's. \"\"\" _restriction = R(list, type(None)) @property def schema_value(self): \"\"\" :rtype: list[dict]", ":type reverse: bool \"\"\" self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable)", "do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions):", "object in list. :type nullable: bool :type key: function :type reverse: bool \"\"\"", "Use this when you need a restriction for a list of DataObject's. \"\"\"", "from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when you need a", ":param obj_cls: The DO to check each value in the list against. :type", "obj_cls: DataObject :param nullable: Valid values are a list of Do's or a", "a NoneType. :type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable =", "list after ManagedList does its work. \"\"\" super(OrderedManagedList, self).manage() self.data = sorted(self.data, key=self.key,", "obj_cls, nullable=False): \"\"\" :param obj_cls: The DO to check each value in the", "a list of Do's or a NoneType. 
:type nullable: bool \"\"\" super(ManagedList, self).__init__()", "[] for item in self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data", "self.data = items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit", "def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The DO to check each value", "value in the list against. :type obj_cls: DataObject :param nullable: Valid values are", "nullable def manage(self): if self.data is not None: items = [] for item", ":type nullable: bool :type key: function :type reverse: bool \"\"\" self.key = key", "nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable def manage(self):", "\"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The DO to", "self.obj_cls(item)) self.data = items else: if not self.nullable: raise RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO:", "from do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class", "are a list of Do's or a NoneType. :type nullable: bool \"\"\" super(ManagedList,", "RestrictionError.bad_data(self.data, self._restriction.allowed) # TODO: Unit tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None,", "[self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The DO to check each", "\"\"\" :rtype: list[dict] \"\"\" return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls:", "ManagedList(ManagedRestrictions): \"\"\" Use this when you need a restriction for a list of", "in self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item)) self.data = items else:", "wrap each object in list. 
:type nullable: bool :type key: function :type reverse:", "if type(item) == self.obj_cls else self.obj_cls(item)) self.data = items else: if not self.nullable:", "DataObject :param nullable: Valid values are a list of Do's or a NoneType.", ":type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable def", "reverse=False): \"\"\" :param obj_cls: DataObject class reference to wrap each object in list.", ":param nullable: Valid values are a list of Do's or a NoneType. :type", "list of Do's or a NoneType. :type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls", "super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the data list after ManagedList does", "self).__init__() self.obj_cls = obj_cls self.nullable = nullable def manage(self): if self.data is not", "R from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use", "self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort", "= [] for item in self.data: items.append(item if type(item) == self.obj_cls else self.obj_cls(item))", "return [self.obj_cls.schema] def __init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The DO to check", "nullable=nullable) def manage(self): \"\"\" Sort the data list after ManagedList does its work.", "self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the data list after ManagedList does its", "def manage(self): if self.data is not None: items = [] for item in", "super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable def manage(self): if self.data is", "a restriction for a list of DataObject's. \"\"\" _restriction = R(list, type(None)) @property", "obj_cls: The DO to check each value in the list against. 
:type obj_cls:", "each value in the list against. :type obj_cls: DataObject :param nullable: Valid values", "from do_py.data_object.restriction import ManagedRestrictions from do_py.exceptions import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this", "each object in list. :type nullable: bool :type key: function :type reverse: bool", "\"\"\" self.key = key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\"", "= key self.reverse = reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the", "reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the data list after ManagedList", "__init__(self, obj_cls, nullable=False): \"\"\" :param obj_cls: The DO to check each value in", "import RestrictionError class ManagedList(ManagedRestrictions): \"\"\" Use this when you need a restriction for", "to check each value in the list against. :type obj_cls: DataObject :param nullable:", "class reference to wrap each object in list. :type nullable: bool :type key:", "nullable=False): \"\"\" :param obj_cls: The DO to check each value in the list", "= reverse super(OrderedManagedList, self).__init__(obj_cls, nullable=nullable) def manage(self): \"\"\" Sort the data list after", "manage(self): \"\"\" Sort the data list after ManagedList does its work. \"\"\" super(OrderedManagedList,", "NoneType. :type nullable: bool \"\"\" super(ManagedList, self).__init__() self.obj_cls = obj_cls self.nullable = nullable", "values are a list of Do's or a NoneType. :type nullable: bool \"\"\"", "\"\"\" :date_created: 2020-06-28 \"\"\" from do_py.common import R from do_py.data_object.restriction import ManagedRestrictions from", "data list after ManagedList does its work. \"\"\" super(OrderedManagedList, self).manage() self.data = sorted(self.data,", "after ManagedList does its work. 
\"\"\" super(OrderedManagedList, self).manage() self.data = sorted(self.data, key=self.key, reverse=self.reverse)", "the data list after ManagedList does its work. \"\"\" super(OrderedManagedList, self).manage() self.data =", "tests class OrderedManagedList(ManagedList): def __init__(self, obj_cls, nullable=False, key=None, reverse=False): \"\"\" :param obj_cls: DataObject", "class ManagedList(ManagedRestrictions): \"\"\" Use this when you need a restriction for a list", "Sort the data list after ManagedList does its work. \"\"\" super(OrderedManagedList, self).manage() self.data" ]
[ "nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G,", "6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "= nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list =", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "-1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph() node_list =", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) 
self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "test_disconnected_graph(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1)", "2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph() node_list = ['A',", "bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph() node_list =", "self.assertEqual(G.nodes['D']['distance'], -1) 
self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph() node_list =", "import unittest import networkx as nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def", "nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A',", "'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'],", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0)", "'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0)", "self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) 
self.assertEqual(G.nodes['A']['distance'], 0)", "self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph() node_list =", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self):", "1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph() node_list", "self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph() node_list = ['A', 'B',", "networkx as nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G =", "self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph() node_list = ['A', 'B',", "['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'],", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) 
self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", "node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'],", "-1) def test_sequential(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "3) def test_parallel_7(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "def test_parallel_4(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "3) def test_parallel_3(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self):", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G = nx.Graph() node_list", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) 
self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph()", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2)", "self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G", "-1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G =", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "test_parallel_2(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list =", "self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph() 
node_list = ['A', 'B',", "self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def", "self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5)", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0)", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0)", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", "self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "def test_disconnected_graph(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", "1) self.assertEqual(G.nodes['D']['distance'], 2) 
self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph() node_list", "'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "def test_parallel_7(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "self.assertEqual(G.nodes['E']['distance'], 2) 
self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph() node_list = ['A', 'B',", "'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G", "self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1)", "2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self):", "3) def test_parallel_4(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] 
G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self):", "as nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph()", "self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G = nx.Graph() node_list = ['A', 'B',", "self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph()", "test_parallel_6(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G,", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1)", "def test_parallel_3(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph() 
node_list = ['A', 'B', 'C',", "self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph() node_list = ['A', 'B',", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'],", "TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "import networkx as nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G", "0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self):", "node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A',", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "def test_parallel_2(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'],", "2) self.assertEqual(G.nodes['E']['distance'], 2) 
self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G = nx.Graph() node_list = ['A',", "1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G =", "bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G =", "class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "= ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1)", "self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G = nx.Graph() node_list =", "'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'],", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4)", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "4) 
self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3)", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1)", "-1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph() node_list = ['A',", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1)", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0)", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def 
test_parallel_6(self): G = nx.Graph()", "3) def test_parallel_6(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes),", "self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G", "G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list", "unittest import networkx as nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self):", "'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A',", "self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph()", "7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'],", "self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) 
self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph() node_list =", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph() node_list", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", "self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def", "bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0)", "1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G =", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", 
"2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph() node_list =", "def test_parallel_6(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph()", "'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0)", "edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1)", "nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 4) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "Medusa.graphs 
import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list = ['A',", "G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'],", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G", "'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph() node_list", 
"self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self):", "1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G =", "'E', 'F'] G.add_nodes_from(node_list) self.assertEqual(list(G.nodes), node_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'],", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list)", "bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "test_parallel_5(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "test_parallel_7(self): G = nx.Graph() node_list = 
['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D',", "self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G", "self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_5(self): G = nx.Graph()", "2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph() node_list = ['A',", "G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'],", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "= nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) 
self.assertEqual(list(G.nodes), node_list)", "G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'],", "2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_6(self): G = nx.Graph() node_list = ['A',", "bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph() node_list", "'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 1) self.assertEqual(G.nodes['A']['distance'],", "2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph() node_list = ['A',", "self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_4(self): G", "test_parallel_4(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "3) def test_parallel_5(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def 
test_parallel_6(self): G =", "[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1)", "2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G = nx.Graph() node_list = ['A', 'B', 'C',", "'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'],", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 5) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B',", "self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G", "3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3)", "def test_parallel_5(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self):", "'E', 
'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7) self.assertEqual(G.nodes['A']['distance'],", "import bfs class TestBFS(unittest.TestCase): def test_disconnected_graph(self): G = nx.Graph() node_list = ['A', 'B',", "def test_sequential(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F']", "self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_2(self): G = nx.Graph() node_list = ['A', 'B',", "#!/usr/bin/env python3 import unittest import networkx as nx from Medusa.graphs import bfs class", "'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2)", "bfs.breadth_first_search(G, 'A', 2) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2)", "2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph() node_list = ['A',", "test_parallel_3(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "-1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1) def test_sequential(self): G = nx.Graph() node_list", "= [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'],", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = 
[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 6)", "python3 import unittest import networkx as nx from Medusa.graphs import bfs class TestBFS(unittest.TestCase):", "'A', 3) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], 1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'],", "3) def test_parallel_2(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E',", "self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_3(self): G = nx.Graph()", "1) self.assertEqual(G.nodes['C']['distance'], 1) self.assertEqual(G.nodes['D']['distance'], 2) self.assertEqual(G.nodes['E']['distance'], 2) self.assertEqual(G.nodes['F']['distance'], 3) def test_parallel_7(self): G =", "'A', 1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'],", "1) self.assertEqual(G.nodes['A']['distance'], 0) self.assertEqual(G.nodes['B']['distance'], -1) self.assertEqual(G.nodes['C']['distance'], -1) self.assertEqual(G.nodes['D']['distance'], -1) self.assertEqual(G.nodes['E']['distance'], -1) self.assertEqual(G.nodes['F']['distance'], -1)", "'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = [('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')] G.add_edges_from(edge_list) bfs.breadth_first_search(G, 'A', 7)", "test_sequential(self): G = nx.Graph() node_list = ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list)", "= ['A', 'B', 'C', 'D', 'E', 'F'] G.add_nodes_from(node_list) edge_list = 
[('A','C'),('A', 'B'),('C','E'),('B', 'D'),('D','F')]" ]
[ "callable(val): i += 1 if i >= OVERFLOW: break val = val(context, root)", "i += 1 if i >= OVERFLOW: break val = val(context, root) return", "def type_of(value): if value is None: return TYPES.NONE if is_array(value): return TYPES.ARRAY if", "format_op == \"capitalize\": value = str(value) new_value = \"\" for i, c in", "return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root, handle_exception=None): if not context: context", "val = val(context, root) return val except Exception as e: if not handle_exception:", "\"html\"): value = str(value) escape_html = False elif format_op == \"encode\": value =", "is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return", "2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\",", "2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW", ">= OVERFLOW: break val = val(context, root) return val except Exception as e:", "if format_op in (\"raw\", \"html\"): value = str(value) escape_html = False elif format_op", "1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4", "+= c value = new_value else: if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\"", "format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value)", "1 if i >= OVERFLOW: break val = val(context, root) return val except", "elif format_op in (\"allcaps\", \"caps\", \"upper\"): value = str(value).upper() elif format_op in (\"lower\",):", "not i or (not c.isspace() and value[i-1].isspace()): new_value += c.upper() else: new_value +=", "= str(value) escape_html = False elif format_op == \"encode\": value = str(value) escape_html", "= True elif format_op in (\"allcaps\", \"caps\", \"upper\"): 
value = str(value).upper() elif format_op", "if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION", "if not handle_exception: raise e return handle_exception(e) def format_value(value, format_op, escape_html=False): if value", "\"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value", "if not context: context = {} try: val = func i = 0", "{} try: val = func i = 0 while callable(val): i += 1", "str(value) escape_html = True elif format_op in (\"allcaps\", \"caps\", \"upper\"): value = str(value).upper()", "collections, collections.abc _types = { \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1,", "if value is None: return \"\" if format_op: if format_op in (\"raw\", \"html\"):", "<filename>lib/misc.py import collections, collections.abc _types = { \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0,", "as e: if not handle_exception: raise e return handle_exception(e) def format_value(value, format_op, escape_html=False):", "handle_exception(e) def format_value(value, format_op, escape_html=False): if value is None: return \"\" if format_op:", "else: value = str(value) if escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\")", "\"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES", "= _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test,", "if escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\", "\"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\", \"&quot;\") \\ .replace(\"'\", \"&#039;\")", "\"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types = 
collections.namedtuple(\"_NT_TYPES\", list(_types.keys()))", "c value = new_value else: if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else:", "OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return isinstance(test,", "value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\", \"&quot;\")", "if not i or (not c.isspace() and value[i-1].isspace()): new_value += c.upper() else: new_value", "= {} try: val = func i = 0 while callable(val): i +=", "val = func i = 0 while callable(val): i += 1 if i", "escape_html = False elif format_op == \"encode\": value = str(value) escape_html = True", "\"\" for i, c in enumerate(value): if not i or (not c.isspace() and", "escape_html = True elif format_op in (\"allcaps\", \"caps\", \"upper\"): value = str(value).upper() elif", "elif format_op == \"encode\": value = str(value) escape_html = True elif format_op in", "return TYPES.VALUE def evalf(func, context, root, handle_exception=None): if not context: context = {}", "\"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\":", "\"encode\": value = str(value) escape_html = True elif format_op in (\"allcaps\", \"caps\", \"upper\"):", "+= c.upper() else: new_value += c value = new_value else: if format_op[0] ==", "collections.abc _types = { \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\":", "_NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types", "\"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\":", "TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root, handle_exception=None): if not context: context =", "else: if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value", "= 
format_op.format(value) else: value = str(value) if escape_html: value = value.replace(\"&\", \"&amp;\") \\", "callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root, handle_exception=None): if not context:", "None: return TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if", "return isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def type_of(value): if value is None:", "TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root, handle_exception=None): if", "= \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value = str(value) if escape_html: value =", "_NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def type_of(value): if", "def evalf(func, context, root, handle_exception=None): if not context: context = {} try: val", "1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3,", "not isinstance(test, str) def type_of(value): if value is None: return TYPES.NONE if is_array(value):", "collections.abc.Sequence) and not isinstance(test, str) def type_of(value): if value is None: return TYPES.NONE", "True elif format_op in (\"allcaps\", \"caps\", \"upper\"): value = str(value).upper() elif format_op in", "\"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\":", "format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value =", "} _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del _types,", "if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func,", "return handle_exception(e) def format_value(value, 
format_op, escape_html=False): if value is None: return \"\" if", "elif format_op in (\"lower\",): value = str(value).lower() elif format_op == \"capitalize\": value =", "c in enumerate(value): if not i or (not c.isspace() and value[i-1].isspace()): new_value +=", "c.isspace() and value[i-1].isspace()): new_value += c.upper() else: new_value += c value = new_value", "\"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\":", "root) return val except Exception as e: if not handle_exception: raise e return", "\"capitalize\": value = str(value) new_value = \"\" for i, c in enumerate(value): if", "= val(context, root) return val except Exception as e: if not handle_exception: raise", ".replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\", \"&quot;\") \\ .replace(\"'\", \"&#039;\") return value", "elif format_op == \"capitalize\": value = str(value) new_value = \"\" for i, c", "\"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value = str(value) if", "value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\", \"&quot;\") \\ .replace(\"'\",", "break val = val(context, root) return val except Exception as e: if not", "TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return", "return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE", "type_of(value): if value is None: return TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value,", "Exception as e: if not handle_exception: raise e return handle_exception(e) def format_value(value, format_op,", "(not c.isspace() and value[i-1].isspace()): new_value += c.upper() else: new_value += c value =", "= \"\" for i, c in enumerate(value): if not i or (not 
c.isspace()", "new_value = \"\" for i, c in enumerate(value): if not i or (not", "= 99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence)", "handle_exception: raise e return handle_exception(e) def format_value(value, format_op, escape_html=False): if value is None:", "else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value = str(value) if escape_html:", "== \"capitalize\": value = str(value) new_value = \"\" for i, c in enumerate(value):", "not context: context = {} try: val = func i = 0 while", "= str(value) if escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\",", "== \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else:", "None: return \"\" if format_op: if format_op in (\"raw\", \"html\"): value = str(value)", "format_op in (\"raw\", \"html\"): value = str(value) escape_html = False elif format_op ==", "_types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def type_of(value):", "format_op == \"encode\": value = str(value) escape_html = True elif format_op in (\"allcaps\",", "value = str(value) new_value = \"\" for i, c in enumerate(value): if not", "return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root, handle_exception=None):", "if callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root, handle_exception=None): if not", "value is None: return TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return", "in (\"lower\",): value = str(value).lower() elif format_op == \"capitalize\": value = str(value) new_value", "\"caps\", \"upper\"): value = str(value).upper() elif format_op in (\"lower\",): value = str(value).lower() elif", "e: if 
not handle_exception: raise e return handle_exception(e) def format_value(value, format_op, escape_html=False): if", "return TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value):", "-1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2,", "False elif format_op == \"encode\": value = str(value) escape_html = True elif format_op", "= \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value = str(value)", "TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE def", "for i, c in enumerate(value): if not i or (not c.isspace() and value[i-1].isspace()):", "str(value) escape_html = False elif format_op == \"encode\": value = str(value) escape_html =", "_types = { \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1,", "\"upper\"): value = str(value).upper() elif format_op in (\"lower\",): value = str(value).lower() elif format_op", "format_value(value, format_op, escape_html=False): if value is None: return \"\" if format_op: if format_op", "= False elif format_op == \"encode\": value = str(value) escape_html = True elif", "= 0 while callable(val): i += 1 if i >= OVERFLOW: break val", "except Exception as e: if not handle_exception: raise e return handle_exception(e) def format_value(value,", "i = 0 while callable(val): i += 1 if i >= OVERFLOW: break", "not handle_exception: raise e return handle_exception(e) def format_value(value, format_op, escape_html=False): if value is", "escape_html=False): if value is None: return \"\" if format_op: if format_op in (\"raw\",", "raise e return handle_exception(e) def format_value(value, format_op, escape_html=False): if value is None: return", "\"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 
TYPES = _NT_types(*list(_types.values()))", "root, handle_exception=None): if not context: context = {} try: val = func i", "value = str(value).lower() elif format_op == \"capitalize\": value = str(value) new_value = \"\"", "= new_value else: if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op =", "format_op.format(value) else: value = str(value) if escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\",", "= func i = 0 while callable(val): i += 1 if i >=", "format_op: if format_op in (\"raw\", \"html\"): value = str(value) escape_html = False elif", "format_op = \"{0:\"+format_op+\"}\" value = format_op.format(value) else: value = str(value) if escape_html: value", "str(value) if escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\")", "collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test):", "== \"encode\": value = str(value) escape_html = True elif format_op in (\"allcaps\", \"caps\",", "(\"allcaps\", \"caps\", \"upper\"): value = str(value).upper() elif format_op in (\"lower\",): value = str(value).lower()", "= str(value).upper() elif format_op in (\"lower\",): value = str(value).lower() elif format_op == \"capitalize\":", "(\"lower\",): value = str(value).lower() elif format_op == \"capitalize\": value = str(value) new_value =", "handle_exception=None): if not context: context = {} try: val = func i =", "while callable(val): i += 1 if i >= OVERFLOW: break val = val(context,", "is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def type_of(value): if value is", "+= 1 if i >= OVERFLOW: break val = val(context, root) return val", "isinstance(test, str) def type_of(value): if value is None: return TYPES.NONE if is_array(value): return", "(\"raw\", \"html\"): value = str(value) escape_html = False elif format_op == 
\"encode\": value", "\"\" if format_op: if format_op in (\"raw\", \"html\"): value = str(value) escape_html =", "\"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 }", "str(value) new_value = \"\" for i, c in enumerate(value): if not i or", "str(value).lower() elif format_op == \"capitalize\": value = str(value) new_value = \"\" for i,", "i or (not c.isspace() and value[i-1].isspace()): new_value += c.upper() else: new_value += c", "\"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW =", "new_value else: if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\"", "e return handle_exception(e) def format_value(value, format_op, escape_html=False): if value is None: return \"\"", "value = new_value else: if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op", "= str(value).lower() elif format_op == \"capitalize\": value = str(value) new_value = \"\" for", "TYPES.VALUE def evalf(func, context, root, handle_exception=None): if not context: context = {} try:", "4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del", "TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and not", "= str(value) escape_html = True elif format_op in (\"allcaps\", \"caps\", \"upper\"): value =", "or (not c.isspace() and value[i-1].isspace()): new_value += c.upper() else: new_value += c value", "\"{0:\"+format_op+\"}\" value = format_op.format(value) else: value = str(value) if escape_html: value = value.replace(\"&\",", "= str(value) new_value = \"\" for i, c in enumerate(value): if not i", "else: new_value += c value = new_value else: if format_op[0] == \"$\": format_op", "new_value += c value = new_value else: if format_op[0] == 
\"$\": format_op =", "1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types", "value = str(value).upper() elif format_op in (\"lower\",): value = str(value).lower() elif format_op ==", "is None: return \"\" if format_op: if format_op in (\"raw\", \"html\"): value =", "list(_types.keys())) OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return", "in enumerate(value): if not i or (not c.isspace() and value[i-1].isspace()): new_value += c.upper()", "value = format_op.format(value) else: value = str(value) if escape_html: value = value.replace(\"&\", \"&amp;\")", "if format_op: if format_op in (\"raw\", \"html\"): value = str(value) escape_html = False", "3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99", "99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and", "format_op in (\"allcaps\", \"caps\", \"upper\"): value = str(value).upper() elif format_op in (\"lower\",): value", "import collections, collections.abc _types = { \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\":", "value = str(value) escape_html = False elif format_op == \"encode\": value = str(value)", "{ \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1,", "def format_value(value, format_op, escape_html=False): if value is None: return \"\" if format_op: if", "func i = 0 while callable(val): i += 1 if i >= OVERFLOW:", "if format_op[0] == \"$\": format_op = \"${0:\"+format_op[1:]+\"}\" else: format_op = \"{0:\"+format_op+\"}\" value =", "if i >= OVERFLOW: break val = val(context, root) return val except Exception", "= value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\", \"&quot;\") \\", "i >= OVERFLOW: break val = val(context, root) return val except Exception 
as", "context, root, handle_exception=None): if not context: context = {} try: val = func", "format_op, escape_html=False): if value is None: return \"\" if format_op: if format_op in", "isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context,", "= collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES = _NT_types(*list(_types.values())) del _types, _NT_types def", "\"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\":", "return \"\" if format_op: if format_op in (\"raw\", \"html\"): value = str(value) escape_html", "enumerate(value): if not i or (not c.isspace() and value[i-1].isspace()): new_value += c.upper() else:", "\"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3, \"DICTIONARY\": 3, \"FUNCTION\": 4 } _NT_types =", "0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2,", "is None: return TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping): return TYPES.DICTIONARY", "try: val = func i = 0 while callable(val): i += 1 if", "str(value).upper() elif format_op in (\"lower\",): value = str(value).lower() elif format_op == \"capitalize\": value", "val except Exception as e: if not handle_exception: raise e return handle_exception(e) def", "and not isinstance(test, str) def type_of(value): if value is None: return TYPES.NONE if", "if value is None: return TYPES.NONE if is_array(value): return TYPES.ARRAY if isinstance(value, collections.abc.Mapping):", "and value[i-1].isspace()): new_value += c.upper() else: new_value += c value = new_value else:", "escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\",", "_NT_types(*list(_types.values())) del _types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test, str)", 
"context: context = {} try: val = func i = 0 while callable(val):", "0 while callable(val): i += 1 if i >= OVERFLOW: break val =", "new_value += c.upper() else: new_value += c value = new_value else: if format_op[0]", "evalf(func, context, root, handle_exception=None): if not context: context = {} try: val =", "isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def type_of(value): if value is None: return", "val(context, root) return val except Exception as e: if not handle_exception: raise e", "value = str(value) if escape_html: value = value.replace(\"&\", \"&amp;\") \\ .replace(\"<\", \"&lt;\") \\", "OVERFLOW: break val = val(context, root) return val except Exception as e: if", "in (\"raw\", \"html\"): value = str(value) escape_html = False elif format_op == \"encode\":", "del _types, _NT_types def is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def", "in (\"allcaps\", \"caps\", \"upper\"): value = str(value).upper() elif format_op in (\"lower\",): value =", "format_op in (\"lower\",): value = str(value).lower() elif format_op == \"capitalize\": value = str(value)", "= { \"UNDEFINED\": -1, \"NULL\": 0, \"NONE\": 0, \"VALUE\": 1, \"STRING\": 1, \"NUMBER\":", "str) def type_of(value): if value is None: return TYPES.NONE if is_array(value): return TYPES.ARRAY", "def is_array(test): return isinstance(test, collections.abc.Sequence) and not isinstance(test, str) def type_of(value): if value", "i, c in enumerate(value): if not i or (not c.isspace() and value[i-1].isspace()): new_value", "collections.abc.Mapping): return TYPES.DICTIONARY if callable(value): return TYPES.FUNCTION return TYPES.VALUE def evalf(func, context, root,", "value = str(value) escape_html = True elif format_op in (\"allcaps\", \"caps\", \"upper\"): value", "context = {} try: val = func i = 0 while callable(val): i", "value[i-1].isspace()): new_value += c.upper() else: new_value += c value = new_value else: if", "0, \"VALUE\": 
1, \"STRING\": 1, \"NUMBER\": 1, \"ARRAY\": 2, \"LIST\": 2, \"OBJECT\": 3,", "value is None: return \"\" if format_op: if format_op in (\"raw\", \"html\"): value", "return val except Exception as e: if not handle_exception: raise e return handle_exception(e)", "c.upper() else: new_value += c value = new_value else: if format_op[0] == \"$\":", "\\ .replace(\"<\", \"&lt;\") \\ .replace(\">\", \"&gt;\") \\ .replace(\"\\\"\", \"&quot;\") \\ .replace(\"'\", \"&#039;\") return", "3, \"FUNCTION\": 4 } _NT_types = collections.namedtuple(\"_NT_TYPES\", list(_types.keys())) OVERFLOW = 99 TYPES =" ]
[ "convention=convention, calendar=calendar ) else: maturity_ = mty_ dt = maturity_ while dt.serial >", "52, 'M': 12, 'Y': 1} # useful for sorting def __init__(self, txt): firstNum", "{'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W':", "* from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W':", "[pydate_from_qldate(dt) for dt in sched] return sched @property def term(self): ''' Length of", "dt = maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True)", "= {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365,", "firstNum = True firstCh = True numTxt = \"\" unit=\"Y\" for i in", "__init__(self, txt): firstNum = True firstCh = True numTxt = \"\" unit=\"Y\" for", "int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ =", "1} # useful for sorting def __init__(self, txt): firstNum = True firstCh =", "stub. ''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = [] if type(maturity_)", "sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy:", "import date from pybg.enums import TimeUnits from pybg.quantlib.time.api import * from pybg.ql import", "on the given frequency. ''' return int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted,", "settlement and maturity. ''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle,", "unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns", "short front stub. 
''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = []", "return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of", "maturity. ''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if", "settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a", "dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched =", "@modified: July 2012 to replace SWIG Quantlib bindings with pyQL Cython code. '''", "<NAME> @copyright: BG Research LLC, 2011 @modified: July 2012 to replace SWIG Quantlib", "from settleDate to maturity with a short front stub. ''' settle_ = qldate_from_pydate(settle_)", "qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = [] if type(maturity_) == str and not", "\"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer periods in the tenor", "code. 
''' from datetime import date from pybg.enums import TimeUnits from pybg.quantlib.time.api import", "''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = [] if type(maturity_) ==", "Research LLC, 2011 @modified: July 2012 to replace SWIG Quantlib bindings with pyQL", "{'D': 365, 'W': 52, 'M': 12, 'Y': 1} # useful for sorting def", "self.timeunit, convention=convention) return date_ if not aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_,", "pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days)", "for i in str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit(): numTxt = numTxt", "dt.serial) if aspy: sched = [pydate_from_qldate(dt) for dt in sched] return sched @property", "from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks,", "self.length = int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls,", "def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length", "front stub. 
''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = [] if", "if firstCh and (i.upper() in self._tenorUnits): unit = i.upper() firstCh = False else:", "float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit) @property def tuple(self): return (self.length,", "date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if not aspy else pydate_from_qldate(date_)", "length_, self.timeunit, convention=convention) return date_ if not aspy else pydate_from_qldate(date_) def schedule(self, settle_,", "self._tenorUnits): unit = i.upper() firstCh = False else: pass if(firstNum): numTxt=\"0\" self.length =", "def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the tenor associated with settlement and", "'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52, 'M': 12, 'Y':", "while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda", "= \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit def", "mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ = mty_ dt =", ") else: maturity_ = mty_ dt = maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt,", "to replace SWIG Quantlib bindings with pyQL Cython code. ''' from datetime import", "365, 'W': 52, 'M': 12, 'Y': 1} # useful for sorting def __init__(self,", "with pyQL Cython code. 
''' from datetime import date from pybg.enums import TimeUnits", "= [pydate_from_qldate(dt) for dt in sched] return sched @property def term(self): ''' Length", "else: maturity_ = mty_ dt = maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention))", "maturity) if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return", "not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ = mty_ dt", "'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52, 'M':", "# useful for sorting def __init__(self, txt): firstNum = True firstCh = True", "maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_ >= 1.0: t =", "= calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if not aspy else pydate_from_qldate(date_) def", "= \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def", "date from pybg.enums import TimeUnits from pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate,", "convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule of", "firstCh = False else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit = unit", "in the tenor based on the given frequency. ''' return int(self.term * int(frequency))", "\"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self):", "years. 
''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit)", "= numTxt + i if firstNum: firstNum = False elif i.isalpha(): if firstCh", "= maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else:", "= self.length if not reverse else -self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention)", "not reverse else -self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if", "TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52, 'M': 12, 'Y': 1} # useful", "convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched", "Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength =", "of dates from settleDate to maturity with a short front stub. 
''' settle_", "-self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if not aspy else", "in str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit(): numTxt = numTxt + i", "import TimeUnits from pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object):", "TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52, 'M': 12,", "Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the tenor associated with", "settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_ >=", "* int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_", "settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if", "if firstNum: firstNum = False elif i.isalpha(): if firstCh and (i.upper() in self._tenorUnits):", "maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule", "def __init__(self, txt): firstNum = True firstCh = True numTxt = \"\" unit=\"Y\"", "= i.upper() firstCh = False else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit", "i in str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit(): numTxt = numTxt +", "'''Returns the number of integer periods in the tenor based on the given", "= self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the tenor", "with a short front stub. 
''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched", "maturity_ = mty_ dt = maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt", "daycount.year_fraction(settle, maturity) if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\"))", "qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else:", "maturity, daycount=ActualActual()): ''' Returns the tenor associated with settlement and maturity. ''' settle", "fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the tenor associated with settlement and maturity.", "True numTxt = \"\" unit=\"Y\" for i in str(txt).replace(' ', ''): if i.isalnum():", "pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D':", "from datetime import date from pybg.enums import TimeUnits from pybg.quantlib.time.api import * from", "TimeUnits from pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits", "str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit(): numTxt = numTxt + i if", "def term(self): ''' Length of tenor in years. ''' return float(self.length) / float(self._tenorLength.get(self.unit,", "of integer periods in the tenor based on the given frequency. ''' return", "sched = [pydate_from_qldate(dt) for dt in sched] return sched @property def term(self): '''", "if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod", "unit=\"Y\" for i in str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit(): numTxt =", "''' Returns the tenor associated with settlement and maturity. 
''' settle = qldate_from_pydate(settle)", "t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit", "Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ = mty_ dt = maturity_ while dt.serial", "str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer", "\"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self,", "return cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual):", "= True numTxt = \"\" unit=\"Y\" for i in str(txt).replace(' ', ''): if", "self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the", "sched = [] if type(maturity_) == str and not mty_: maturity_ = Tenor(maturity_).advance(settle_,", "periods in the tenor based on the given frequency. ''' return int(self.term *", ">= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self):", "years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def", "qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years}", "daycount=ActualActual()): ''' Returns the tenor associated with settlement and maturity. ''' settle =", "and maturity. 
''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity)", "schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives", "the number of integer periods in the tenor based on the given frequency.", "aspy: sched = [pydate_from_qldate(dt) for dt in sched] return sched @property def term(self):", "import * from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days,", "settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = [] if type(maturity_) == str", "if type(maturity_) == str and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar )", "self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt) for", "reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt) for dt", "numTxt = \"\" unit=\"Y\" for i in str(txt).replace(' ', ''): if i.isalnum(): if", "calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if not aspy else pydate_from_qldate(date_) def schedule(self,", "LLC, 2011 @modified: July 2012 to replace SWIG Quantlib bindings with pyQL Cython", "Cython code. ''' from datetime import date from pybg.enums import TimeUnits from pybg.quantlib.time.api", "type(maturity_) == str and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else:", "else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self): return", "the tenor based on the given frequency. ''' return int(self.term * int(frequency)) def", "of tenor in years. 
''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self):", "convention=convention) return date_ if not aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted,", "= qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched = [] if type(maturity_) == str and", "mty_ dt = maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt,", "= unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): '''", "import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months,", "= \"\" unit=\"Y\" for i in str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit():", "convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length if not reverse", "Tenor class @author: <NAME> @copyright: BG Research LLC, 2011 @modified: July 2012 to", "Quantlib bindings with pyQL Cython code. ''' from datetime import date from pybg.enums", "numTxt + i if firstNum: firstNum = False elif i.isalpha(): if firstCh and", "@copyright: BG Research LLC, 2011 @modified: July 2012 to replace SWIG Quantlib bindings", "else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt) for dt in", "return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit) @property def", "tenor in years. 
''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return", "def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y')", "= False elif i.isalpha(): if firstCh and (i.upper() in self._tenorUnits): unit = i.upper()", "self.length if not reverse else -self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return", "if aspy: sched = [pydate_from_qldate(dt) for dt in sched] return sched @property def", "dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt:", "if i.isdigit(): numTxt = numTxt + i if firstNum: firstNum = False elif", "''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit) @property", "@classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the tenor associated with settlement", "dates from settleDate to maturity with a short front stub. ''' settle_ =", "pyQL Cython code. ''' from datetime import date from pybg.enums import TimeUnits from", "return sched @property def term(self): ''' Length of tenor in years. ''' return", "@property def term(self): ''' Length of tenor in years. ''' return float(self.length) /", "with settlement and maturity. ''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ =", "useful for sorting def __init__(self, txt): firstNum = True firstCh = True numTxt", "12, 'Y': 1} # useful for sorting def __init__(self, txt): firstNum = True", "sched @property def term(self): ''' Length of tenor in years. 
''' return float(self.length)", "firstCh and (i.upper() in self._tenorUnits): unit = i.upper() firstCh = False else: pass", "= qldate_from_pydate(maturity_) sched = [] if type(maturity_) == str and not mty_: maturity_", "str and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ =", "and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ = mty_", "if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t)", "t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\"", "+ i if firstNum: firstNum = False elif i.isalpha(): if firstCh and (i.upper()", "int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity,", "__repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer periods in", "def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer periods", "float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit) @property def tuple(self):", "tenor based on the given frequency. ''' return int(self.term * int(frequency)) def advance(self,", "maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates from settleDate to maturity", "sched] return sched @property def term(self): ''' Length of tenor in years. '''", "the given frequency. 
''' return int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(),", "= [] if type(maturity_) == str and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention,", "True firstCh = True numTxt = \"\" unit=\"Y\" for i in str(txt).replace(' ',", "in sched] return sched @property def term(self): ''' Length of tenor in years.", "aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length if not reverse else -self.length date_", "2011 @modified: July 2012 to replace SWIG Quantlib bindings with pyQL Cython code.", "__str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number", "sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt) for dt in sched]", "given frequency. ''' return int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False,", "mty_ = qldate_from_pydate(maturity_) sched = [] if type(maturity_) == str and not mty_:", "qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_ >= 1.0: t", "Length of tenor in years. ''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def", "SWIG Quantlib bindings with pyQL Cython code. ''' from datetime import date from", "maturity with a short front stub. ''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_)", "a short front stub. 
''' settle_ = qldate_from_pydate(settle_) mty_ = qldate_from_pydate(maturity_) sched =", "a schedule of dates from settleDate to maturity with a short front stub.", "unit = i.upper() firstCh = False else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt)", "'W': 52, 'M': 12, 'Y': 1} # useful for sorting def __init__(self, txt):", "i.isalnum(): if i.isdigit(): numTxt = numTxt + i if firstNum: firstNum = False", "date_ = qldate_from_pydate(date_) length_ = self.length if not reverse else -self.length date_ =", "qldate_from_pydate(maturity_) sched = [] if type(maturity_) == str and not mty_: maturity_ =", "from pybg.enums import TimeUnits from pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate, qldate_from_pydate", "pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M':", "replace SWIG Quantlib bindings with pyQL Cython code. ''' from datetime import date", "_tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D':", "'10Y') gives a schedule of dates from settleDate to maturity with a short", "based on the given frequency. ''' return int(self.term * int(frequency)) def advance(self, date_,", "2012 to replace SWIG Quantlib bindings with pyQL Cython code. 
''' from datetime", "numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer periods in the tenor based on", "for dt in sched] return sched @property def term(self): ''' Length of tenor", "date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length if not", "/ float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit) @property def tuple(self): return", "tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates from settleDate to maturity with a", "calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length if not reverse else", "return int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ =", "> settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial)", "pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate,", "not aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate,", "class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength", "self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()): ''' Returns the tenor associated", "settle, maturity, daycount=ActualActual()): ''' Returns the tenor associated with settlement and maturity. 
'''", "= qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\"))", "= qldate_from_pydate(date_) length_ = self.length if not reverse else -self.length date_ = calendar.advance(date_,", "(i.upper() in self._tenorUnits): unit = i.upper() firstCh = False else: pass if(firstNum): numTxt=\"0\"", "TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52,", "BG Research LLC, 2011 @modified: July 2012 to replace SWIG Quantlib bindings with", "firstNum = False elif i.isalpha(): if firstCh and (i.upper() in self._tenorUnits): unit =", "''' from datetime import date from pybg.enums import TimeUnits from pybg.quantlib.time.api import *", "i.isdigit(): numTxt = numTxt + i if firstNum: firstNum = False elif i.isalpha():", "frequency. ''' return int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True):", "1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t = \"\".join((str(int(round(years_*12.))),\"M\")) return cls(t) def __str__(self): return", "txt): firstNum = True firstCh = True numTxt = \"\" unit=\"Y\" for i", "numTxt=\"0\" self.length = int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def", "term(self): ''' Length of tenor in years. 
''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0))", "False else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit = unit self.timeunit =", "'Y': 1} # useful for sorting def __init__(self, txt): firstNum = True firstCh", "_tenorLength = {'D': 365, 'W': 52, 'M': 12, 'Y': 1} # useful for", "return date_ if not aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(),", "= mty_ dt = maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt =", "dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt) for dt in sched] return sched", "== str and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_", "def __str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the", "= self.advance(dt, reverse=True) else: sched.append(settle_) sched.sort(key=lambda dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt)", "= {'D': 365, 'W': 52, 'M': 12, 'Y': 1} # useful for sorting", "i.upper() firstCh = False else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit =", "1.0)) @property def QLPeriod(self): return Period(self.length, self.timeunit) @property def tuple(self): return (self.length, self.timeunit)", "'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52, 'M': 12, 'Y': 1} #", "in years. 
''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property def QLPeriod(self): return Period(self.length,", "sorting def __init__(self, txt): firstNum = True firstCh = True numTxt = \"\"", "elif i.isalpha(): if firstCh and (i.upper() in self._tenorUnits): unit = i.upper() firstCh =", "[] if type(maturity_) == str and not mty_: maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar", "else -self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if not aspy", "if i.isalnum(): if i.isdigit(): numTxt = numTxt + i if firstNum: firstNum =", "gives a schedule of dates from settleDate to maturity with a short front", "''' return int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_", "length_ = self.length if not reverse else -self.length date_ = calendar.advance(date_, length_, self.timeunit,", "return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer periods in the", "def numberOfPeriods(self, frequency=Semiannual): '''Returns the number of integer periods in the tenor based", "= False else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit = unit self.timeunit", "if not reverse else -self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return date_", "in self._tenorUnits): unit = i.upper() firstCh = False else: pass if(firstNum): numTxt=\"0\" self.length", "int(self.term * int(frequency)) def advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_)", "advance(self, date_, convention=Unadjusted, calendar=TARGET(), reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length if", "dt in sched] return sched @property def term(self): ''' Length of tenor in", "'M': 12, 'Y': 1} # useful 
for sorting def __init__(self, txt): firstNum =", "years_ = daycount.year_fraction(settle, maturity) if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t", "aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates from", "firstCh = True numTxt = \"\" unit=\"Y\" for i in str(txt).replace(' ', ''):", "self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle, maturity, daycount=ActualActual()):", "maturity_ while dt.serial > settle_.serial: sched.append(calendar.adjust(dt, convention)) dt = self.advance(dt, reverse=True) else: sched.append(settle_)", "to maturity with a short front stub. ''' settle_ = qldate_from_pydate(settle_) mty_ =", "else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or", "cls(t) def __str__(self): return str(self.length)+self.unit def __repr__(self): return \"<Tenor:\"+self.__str__()+\">\" def numberOfPeriods(self, frequency=Semiannual): '''Returns", "tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates from settleDate to", "or tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates from settleDate to maturity with", "maturity_ = Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ = mty_ dt = maturity_", "= Tenor(maturity_).advance(settle_, convention=convention, calendar=calendar ) else: maturity_ = mty_ dt = maturity_ while", "bindings with pyQL Cython code. ''' from datetime import date from pybg.enums import", "schedule of dates from settleDate to maturity with a short front stub. 
'''", "if not aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): '''", "i if firstNum: firstNum = False elif i.isalpha(): if firstCh and (i.upper() in", "''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_", "from pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits =", "settleDate to maturity with a short front stub. ''' settle_ = qldate_from_pydate(settle_) mty_", "integer periods in the tenor based on the given frequency. ''' return int(self.term", "number of integer periods in the tenor based on the given frequency. '''", "@author: <NAME> @copyright: BG Research LLC, 2011 @modified: July 2012 to replace SWIG", "firstNum: firstNum = False elif i.isalpha(): if firstCh and (i.upper() in self._tenorUnits): unit", "= qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_ = daycount.year_fraction(settle, maturity) if years_ >= 1.0:", "', ''): if i.isalnum(): if i.isdigit(): numTxt = numTxt + i if firstNum:", "sched.sort(key=lambda dt: dt.serial) if aspy: sched = [pydate_from_qldate(dt) for dt in sched] return", "Returns the tenor associated with settlement and maturity. ''' settle = qldate_from_pydate(settle) maturity", "associated with settlement and maturity. 
''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity) years_", "datetime import date from pybg.enums import TimeUnits from pybg.quantlib.time.api import * from pybg.ql", "for sorting def __init__(self, txt): firstNum = True firstCh = True numTxt =", "qldate_from_pydate(date_) length_ = self.length if not reverse else -self.length date_ = calendar.advance(date_, length_,", "class @author: <NAME> @copyright: BG Research LLC, 2011 @modified: July 2012 to replace", "= daycount.year_fraction(settle, maturity) if years_ >= 1.0: t = \"\".join((str(int(round(years_))),\"Y\")) else: t =", "= int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit, Days) @classmethod def fromdates(cls, settle,", "''' Length of tenor in years. ''' return float(self.length) / float(self._tenorLength.get(self.unit, 1.0)) @property", "i.isalpha(): if firstCh and (i.upper() in self._tenorUnits): unit = i.upper() firstCh = False", "else: pass if(firstNum): numTxt=\"0\" self.length = int(numTxt) self.unit = unit self.timeunit = self._tenorUnits.get(self.unit,", "reverse else -self.length date_ = calendar.advance(date_, length_, self.timeunit, convention=convention) return date_ if not", "= True firstCh = True numTxt = \"\" unit=\"Y\" for i in str(txt).replace('", "tenor associated with settlement and maturity. ''' settle = qldate_from_pydate(settle) maturity = qldate_from_pydate(maturity)", "TimeUnits.Months, 'Y': TimeUnits.Years} _tenorLength = {'D': 365, 'W': 52, 'M': 12, 'Y': 1}", "calendar=calendar ) else: maturity_ = mty_ dt = maturity_ while dt.serial > settle_.serial:", "reverse=False, aspy=True): date_ = qldate_from_pydate(date_) length_ = self.length if not reverse else -self.length", "the tenor associated with settlement and maturity. 
''' settle = qldate_from_pydate(settle) maturity =", "\"\" unit=\"Y\" for i in str(txt).replace(' ', ''): if i.isalnum(): if i.isdigit(): numTxt", "''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates from settleDate", "date_ if not aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True):", "aspy else pydate_from_qldate(date_) def schedule(self, settle_, maturity_, convention=Unadjusted, calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate)", "and (i.upper() in self._tenorUnits): unit = i.upper() firstCh = False else: pass if(firstNum):", "frequency=Semiannual): '''Returns the number of integer periods in the tenor based on the", "July 2012 to replace SWIG Quantlib bindings with pyQL Cython code. ''' from", "pydate_from_qldate, qldate_from_pydate class Tenor(object): _tenorUnits = {'D': TimeUnits.Days, 'W': TimeUnits.Weeks, 'M': TimeUnits.Months, 'Y':", "False elif i.isalpha(): if firstCh and (i.upper() in self._tenorUnits): unit = i.upper() firstCh", "''): if i.isalnum(): if i.isdigit(): numTxt = numTxt + i if firstNum: firstNum", "pybg.enums import TimeUnits from pybg.quantlib.time.api import * from pybg.ql import pydate_from_qldate, qldate_from_pydate class", "''' Tenor class @author: <NAME> @copyright: BG Research LLC, 2011 @modified: July 2012", "numTxt = numTxt + i if firstNum: firstNum = False elif i.isalpha(): if", "calendar=TARGET(), aspy=True): ''' tenor('3m').schedule(settleDate, maturityDate) or tenor('3m').schedule(settleDate, '10Y') gives a schedule of dates" ]
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" %", "self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\":", "= 0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf =", "= LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self):", "from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter =", "\"GET\", self.fetched_counter return msg def on_error(*args): print \"Basic get failed:\", args if no_ack:", "this file except in compliance with the License. 
# You may obtain a", "= 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10,", "\"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print", "self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter", "LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper", "yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector =", "None) self.show_stoper = None print \"GET\", self.fetched_counter return msg def on_error(*args): print \"Basic", "maybeDeferred from twisted.trial import unittest from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self):", "\"Basic get failed:\", args if no_ack: ack_callback = lambda msg: msg else: ack_callback", "def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message)", "ANY KIND, either express or implied. 
# See the License for the specific", "\"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}},", "= lambda msg: msg else: ack_callback = lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name,", "@inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter =", "txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter = 0", "no_ack=True): def on_message(msg): print msg if msg.method.name != \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body,", "and # limitations under the License. import sys import random from twisted.internet.task import", "self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print \"GET\", self.fetched_counter return", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "twisted.internet import reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial", "= LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False)", "= { \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\":", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "<gh_stars>0 # Copyright 2015 <NAME> <EMAIL> # # Licensed under the Apache License,", "self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback,", "None print \"GET\", self.fetched_counter return msg def on_error(*args): print \"Basic get failed:\", args", "@inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter =", "get_message(self, no_ack=True): def on_message(msg): print msg if msg.method.name != \"get-empty\": self.fetched_counter += 1", "Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks", "1 self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5,", "no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper 
= Deferred()", "OF ANY KIND, either express or implied. # See the License for the", "self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01)", "the specific language governing permissions and # limitations under the License. import sys", "False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher =", "= lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def", "\"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\":", "\"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print \"GET\",", "= Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper", "yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01)", "False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\":", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "= LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def", 
"hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print", "self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "if no_ack: ack_callback = lambda msg: msg else: ack_callback = lambda msg: self.tx.basic_ack(msg)", "yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message)", "@inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter =", "from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import unittest from", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name,", "\"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name,", "d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", 
"required by applicable law or agreed to in writing, software # distributed under", "self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred()", "applicable law or agreed to in writing, software # distributed under the License", "\"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx =", "self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False)", "or agreed to in writing, software # distributed under the License is distributed", "self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\",", "d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send:", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": {", "False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx", "# limitations under the License. 
import sys import random from twisted.internet.task import deferLater,", "\"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\":", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\",", "writing, software # distributed under the License is distributed on an \"AS IS\"", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= None print \"GET\", self.fetched_counter return msg def on_error(*args): print \"Basic get failed:\",", "License. # You may obtain a copy of the License at # #", "test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector", "if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred", "self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield", "sys import random from twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet import reactor", "twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks,", "compliance with the License. 
# You may obtain a copy of the License", "d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\", None)", "self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred()", "self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print msg if msg.method.name !=", "for the specific language governing permissions and # limitations under the License. import", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "\"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\": False,", "random from twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet import reactor from twisted.internet.defer", "Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01,", "self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print msg if msg.method.name", "= Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5)", "False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher =", "def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = 
LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message,", "\"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\":", "not use this file except in compliance with the License. # You may", "setUp(self): self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\"", "\"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self,", "Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks", "= self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print", "\"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name,", "language governing permissions and # limitations under the License. 
import sys import random", "License, Version 2.0 (the \"License\"); # you may not use this file except", "sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\":", "import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import unittest from txamqpr import", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "message\", None) print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1", "\"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self):", "# you may not use this file except in compliance with the License.", "msg def on_error(*args): print \"Basic get failed:\", args if no_ack: ack_callback = lambda", "self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def", "self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter", "True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\":", "agreed to in writing, software # distributed under the License is distributed on", "self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred()", "(the \"License\"); # you may not use this file except in compliance with", "LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) 
self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper =", "self.show_stoper.callback, None) self.show_stoper = None print \"GET\", self.fetched_counter return msg def on_error(*args): print", "# Unless required by applicable law or agreed to in writing, software #", "by applicable law or agreed to in writing, software # distributed under the", "= Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper", "License. import sys import random from twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "= txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print msg if msg.method.name != \"get-empty\":", "Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01)", "0 self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint)", "= \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": { \"exchange\":", "print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def", "test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False)", "file except in compliance with the License. 
# You may obtain a copy", "no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper def tearDown(self): self.tx.stopTrying()", "self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper def tearDown(self): self.tx.stopTrying() self.tx.p.transport.loseConnection()", "self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01,", "unittest from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter", "License for the specific language governing permissions and # limitations under the License.", "self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print \"GET\", self.fetched_counter return msg def", "to in writing, software # distributed under the License is distributed on an", "lambda msg: msg else: ack_callback = lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack)", "implied. 
# See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "\"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print msg if", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test", "self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector =", "self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper =", "msg: msg else: ack_callback = lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback)", "= LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper", "{ \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name,", "= Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01)", "reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import unittest", "LoopingCall(self.get_message) 
self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper =", "or implied. # See the License for the specific language governing permissions and", "self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\",", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "msg.method.name != \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"):", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "ack_callback = lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d", "if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None", "publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter", "= 0 self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "return d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if self.published_counter >=", 
"self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher", "None) print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "\"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\":", "= LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield", "txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send =", "LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. 
#", "from twisted.trial import unittest from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter", "MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name", "self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5)", "under the License. import sys import random from twisted.internet.task import deferLater, LoopingCall, Clock", "yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message)", "def on_message(msg): print msg if msg.method.name != \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test", "1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter", "use this file except in compliance with the License. 
# You may obtain", "print msg if msg.method.name != \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\") else:", "def on_error(*args): print \"Basic get failed:\", args if no_ack: ack_callback = lambda msg:", "import random from twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet import reactor from", "import unittest from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0", "self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "limitations under the License. import sys import random from twisted.internet.task import deferLater, LoopingCall,", "ack_callback = lambda msg: msg else: ack_callback = lambda msg: self.tx.basic_ack(msg) d =", "2.0 (the \"License\"); # you may not use this file except in compliance", "from twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet import reactor from twisted.internet.defer import", "def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message)", "= LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self):", "twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import unittest from txamqpr", "on_message(msg): print msg if msg.method.name != \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\")", 
"WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher =", "# # Unless required by applicable law or agreed to in writing, software", "\"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\":", "message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper", "express or implied. # See the License for the specific language governing permissions", "twisted.trial import unittest from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter =", "{\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf)", "import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send", "self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher", "self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield", "self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks", 
"self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect)", "either express or implied. # See the License for the specific language governing", "self.show_stoper = None print \"GET\", self.fetched_counter return msg def on_error(*args): print \"Basic get", "+= 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message)", "self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False,", "self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print \"GET\", self.fetched_counter", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print msg if msg.method.name != \"get-empty\": self.fetched_counter", "import deferLater, LoopingCall, Clock from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks, Deferred,", "lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self):", "no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter", "def setUp(self): self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name =", "returnValue, DeferredList, maybeDeferred from twisted.trial import unittest from txamqpr 
import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase):", "the License. # You may obtain a copy of the License at #", "<EMAIL> # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", ">= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper =", "{ \"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\":", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "print \"Basic get failed:\", args if no_ack: ack_callback = lambda msg: msg else:", "LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "d.addErrback(on_error) return d def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if self.published_counter", "self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter", "LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield", "self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield", "no_ack: ack_callback = lambda msg: msg else: ack_callback = lambda msg: self.tx.basic_ack(msg) d", "1000 
self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\":", "the License. import sys import random from twisted.internet.task import deferLater, LoopingCall, Clock from", "self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def", "self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher", "= LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper def", "self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher", "\"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def", "yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector", "with the License. 
# You may obtain a copy of the License at", "self.fetched_counter return msg def on_error(*args): print \"Basic get failed:\", args if no_ack: ack_callback", "msg else: ack_callback = lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error)", "from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "Clock from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred", "get failed:\", args if no_ack: ack_callback = lambda msg: msg else: ack_callback =", "{ \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True):", "inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import unittest from txamqpr import txAMQPReconnectingFactory", "yield self.show_stoper @inlineCallbacks def test_pub_and_sub_while_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message)", "self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper", "DeferredList, maybeDeferred from twisted.trial import unittest from txamqpr import txAMQPReconnectingFactory class MyTestCase(unittest.TestCase): def", "law or agreed to in writing, software # distributed under the License is", "= LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def 
test_pub_and_sub_and_ack(self): yield self.tx.deferred", "the License for the specific language governing permissions and # limitations under the", "self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield", "def get_message(self, no_ack=True): def on_message(msg): print msg if msg.method.name != \"get-empty\": self.fetched_counter +=", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if", "import sys import random from twisted.internet.task import deferLater, LoopingCall, Clock from twisted.internet import", "10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True}, \"queue_declare_conf\": {", "self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\":", "in compliance with the License. # You may obtain a copy of the", "2015 <NAME> <EMAIL> # # Licensed under the Apache License, Version 2.0 (the", "def publish_message(self): self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop()", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print \"GET\", self.fetched_counter return msg def on_error(*args):", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "permissions and # limitations under the License. 
import sys import random from twisted.internet.task", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "governing permissions and # limitations under the License. import sys import random from", "LoopingCall, Clock from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList,", "0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf = {", "\"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop()", "failed:\", args if no_ack: ack_callback = lambda msg: msg else: ack_callback = lambda", "See the License for the specific language governing permissions and # limitations under", "Copyright 2015 <NAME> <EMAIL> # # Licensed under the Apache License, Version 2.0", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper =", "{ \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False, \"auto_delete\": True},", "self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect)", "def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message,", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law 
or agreed to in", "self.tx.basic_publish(\"Test message\", None) print \"PUT\", self.published_counter if self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter +=", "LoopingCall(self.get_message) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack(self):", "self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher =", "\"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None)", "# Copyright 2015 <NAME> <EMAIL> # # Licensed under the Apache License, Version", "LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield", "LoopingCall(self.get_message, no_ack=False) self.disconnector = LoopingCall(self.tx._disconnect) self.disconnector.start(5) self.publisher.start(0.01) self.message_getter.start(0.01, False) yield self.show_stoper def tearDown(self):", "<NAME> <EMAIL> # # Licensed under the Apache License, Version 2.0 (the \"License\");", "if self.show_stoper: reactor.callLater(5, self.show_stoper.callback, None) self.show_stoper = None print \"GET\", self.fetched_counter return msg", "\"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}}", "!= \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop()", "Version 2.0 (the \"License\"); 
# you may not use this file except in", "except in compliance with the License. # You may obtain a copy of", "180000}}, \"queue_binding_conf\": { \"exchange\": self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def", "test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message, no_ack=False)", "+= 1 self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self, \"disconnector\"): self.disconnector.stop() self.message_getter.stop() if self.show_stoper:", "print \"GET\", self.fetched_counter return msg def on_error(*args): print \"Basic get failed:\", args if", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01, False)", "False, \"auto_delete\": True}, \"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\":", "self.published_counter >= self.total_messages_to_send: self.publisher.stop() self.published_counter += 1 @inlineCallbacks def test_pub_and_sub(self): yield self.tx.deferred self.show_stoper", "args if no_ack: ack_callback = lambda msg: msg else: ack_callback = lambda msg:", "Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import unittest from txamqpr import txAMQPReconnectingFactory class", "on_error(*args): print \"Basic get failed:\", args if no_ack: ack_callback = lambda msg: msg", "if msg.method.name != \"get-empty\": self.fetched_counter += 1 
self.assertEqual(msg.content.body, \"Test message\") else: if hasattr(self,", "class MyTestCase(unittest.TestCase): def setUp(self): self.fetched_counter = 0 self.published_counter = 0 self.total_messages_to_send = 1000", "specific language governing permissions and # limitations under the License. import sys import", "self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg): print msg", "= LoopingCall(self.get_message, no_ack=False) self.publisher.start(0.01) self.message_getter.start(0.01) yield self.show_stoper @inlineCallbacks def test_pub_and_sub_and_ack_with_disconnect(self): yield self.tx.deferred self.show_stoper", "% random.randint(0, sys.maxint) rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\":", "test_pub_and_sub(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01)", "yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter = LoopingCall(self.get_message) self.publisher.start(0.01) self.message_getter.start(0.01,", "self.published_counter = 0 self.total_messages_to_send = 1000 self.random_name = \"test-txamqpr-client-%s\" % random.randint(0, sys.maxint) rabbitmq_conf", "msg if msg.method.name != \"get-empty\": self.fetched_counter += 1 self.assertEqual(msg.content.body, \"Test message\") else: if", "return msg def on_error(*args): print \"Basic get failed:\", args if no_ack: ack_callback =", "else: ack_callback = lambda msg: self.tx.basic_ack(msg) d = self.tx.basic_get(self.random_name, no_ack) d.addCallback(on_message).addCallback(ack_callback) d.addErrback(on_error) return", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "@inlineCallbacks def 
test_pub_and_sub_and_ack(self): yield self.tx.deferred self.show_stoper = Deferred() self.publisher = LoopingCall(self.publish_message) self.message_getter =", "import reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue, DeferredList, maybeDeferred from twisted.trial import", "rabbitmq_conf = { \"prefetch\": 10, \"exchange_conf\": { \"exchange\": self.random_name, \"type\": \"fanout\", \"durable\": False,", "self.random_name, \"queue\": self.random_name, \"routing_key\": self.random_name}} self.tx = txAMQPReconnectingFactory(**rabbitmq_conf) def get_message(self, no_ack=True): def on_message(msg):", "deferLater, LoopingCall, Clock from twisted.internet import reactor from twisted.internet.defer import inlineCallbacks, Deferred, returnValue,", "\"queue_declare_conf\": { \"queue\": self.random_name, \"durable\": False, \"exclusive\": False, \"arguments\": {\"x-expires\": 180000}}, \"queue_binding_conf\": {" ]
[ "'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" +", "{ 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\"", "arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5(", "details = { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE", "import settings def get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'),", "def get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'),", "hashlib import settings def get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI':", "'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5( arguments.get('canvas')).hexdigest()", "<reponame>dlcs/river-annotations<filename>default_resolver.py import hashlib import settings def get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height':", "= { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE +", "import hashlib import settings def 
get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height': arguments.get('height'),", "get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI':", "'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5( arguments.get('canvas')).hexdigest() + \"/{{line_number}}\" } return", "arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5( arguments.get('canvas')).hexdigest() +", "settings def get_anno_details(arguments): details = { 'width': arguments.get('width'), 'height': arguments.get('height'), 'canvasURI': arguments.get('canvas'), 'imageURI':", "'canvasURI': arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5( arguments.get('canvas')).hexdigest() + \"/{{line_number}}\"", "arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5( arguments.get('canvas')).hexdigest() + \"/{{line_number}}\" } return details", "arguments.get('canvas'), 'imageURI': arguments.get('image'), 'annotationBaseURI': settings.ANNOTATION_BASE + \"/anno/\" + hashlib.md5( arguments.get('canvas')).hexdigest() + \"/{{line_number}}\" }" ]
[ "MetaType import numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf", "np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self):", "= [0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt =", "as np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf class TestEM(unittest.TestCase): def", "numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf class TestEM(unittest.TestCase):", "= log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn, data) if __name__", "log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn", "size=2000).tolist() data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1],", "data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn =", "learn_parametric, learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context", "parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll =", "print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn, data) if __name__ == '__main__': unittest.main()", "0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn, data)", "from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from 
spn.structure.Base import Context from spn.structure.StatisticalTypes", "= Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights =", "from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17)", "+ np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context =", "Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights = [0.8,", "EM_optimization from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from spn.gpu.TensorFlow import", "10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn", "spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt", "= np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data", "= learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn,", "tf_graph_to_spn from spn.structure.Base import Context from spn.structure.StatisticalTypes import MetaType import numpy as np", "as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() +", "eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context from spn.structure.StatisticalTypes import MetaType import numpy", "ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = 
learn_parametric(data, ds_context) spn.weights", "tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30,", "10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] *", "import MetaType import numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as", "import numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf class", "data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn, data) if __name__ == '__main__':", "from spn.structure.Base import Context from spn.structure.StatisticalTypes import MetaType import numpy as np from", "spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context from spn.structure.StatisticalTypes import MetaType import", "Gaussian import tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10,", "size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context", "Context from spn.structure.StatisticalTypes import MetaType import numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian", "data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian]", "learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context from", "from spn.structure.StatisticalTypes import MetaType import numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian import", "spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from 
spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base", "* data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn,", "[0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn,", "likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context from spn.structure.StatisticalTypes import MetaType import numpy as", "0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32)", "import learn_parametric, learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import", "class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10,", "data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn, data)", "test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data =", "import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss,", "unittest from spn.algorithms.EM import EM_optimization from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric,", "np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data =", "import Gaussian import tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data =", "from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, 
learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph,", "import tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01,", "import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context from spn.structure.StatisticalTypes import MetaType", "spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data", "* data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights = [0.8, 0.2]", "import unittest from spn.algorithms.EM import EM_optimization from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import", "TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist()", "spn = learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights)", "np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1])", "def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data", "py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn, data) if", "np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1,", "spn.algorithms.EM import EM_optimization from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from", "spn.structure.Base import Context from 
spn.structure.StatisticalTypes import MetaType import numpy as np from spn.structure.leaves.parametric.Parametric", "spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf,", "from spn.algorithms.EM import EM_optimization from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn", "data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll", "data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data, ds_context)", "data = np.random.normal(10, 0.01, size=2000).tolist() + np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10))", "from spn.algorithms.LearningWrappers import learn_parametric, learn_mspn from spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from", "import Context from spn.structure.StatisticalTypes import MetaType import numpy as np from spn.structure.leaves.parametric.Parametric import", "log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights) py_ll_opt = log_likelihood(spn, data) if __name__ ==", "spn.structure.StatisticalTypes import MetaType import numpy as np from spn.structure.leaves.parametric.Parametric import Gaussian import tensorflow", "tensorflow as tf class TestEM(unittest.TestCase): def test_optimization(self): np.random.seed(17) data = np.random.normal(10, 0.01, size=2000).tolist()", "= data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] * data.shape[1]) spn = learn_parametric(data,", "import EM_optimization from spn.algorithms.Inference import log_likelihood from spn.algorithms.LearningWrappers import 
learn_parametric, learn_mspn from spn.gpu.TensorFlow", "= np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL] * data.shape[1], parametric_types=[Gaussian] *", "spn.gpu.TensorFlow import spn_to_tf_graph, eval_tf, likelihood_loss, tf_graph_to_spn from spn.structure.Base import Context from spn.structure.StatisticalTypes import", "learn_parametric(data, ds_context) spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data)", "np.random.normal(30, 10, size=2000).tolist() data = np.array(data).reshape((-1, 10)) data = data.astype(np.float32) ds_context = Context(meta_types=[MetaType.REAL]", "ds_context) spn.weights = [0.8, 0.2] py_ll = log_likelihood(spn, data) print(spn.weights) EM_optimization(spn, data) print(spn.weights)" ]
[ "430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi, deviation)", "= 80 width = 1550 height = 1028 is_color_recognition_enabled = 0 roi =", "width = 1550 height = 1028 is_color_recognition_enabled = 0 roi = 430 deviation", "is_color_recognition_enabled = 0 roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled,", "api import object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please upgrade your tensorflow installation", "as tf from utils import backbone from api import object_counting_api if tf.__version__ <", "tf.__version__ < '1.4.0': raise ImportError('Please upgrade your tensorflow installation to v1.4.* or later!')", "= 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi, deviation) # counting", "deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi, deviation) #", "import backbone from api import object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please upgrade", "tf from utils import backbone from api import object_counting_api if tf.__version__ < '1.4.0':", "1028 is_color_recognition_enabled = 0 roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index,", "= 1550 height = 1028 is_color_recognition_enabled = 0 roi = 430 deviation =", "tensorflow installation to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index", "input_video = \"./input_footage/trim.mp4\" # 
input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width", "tensorflow as tf from utils import backbone from api import object_counting_api if tf.__version__", "'1.4.0': raise ImportError('Please upgrade your tensorflow installation to v1.4.* or later!') input_video =", "= 0 roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps,", "your tensorflow installation to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph,", "roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height,", "utils import backbone from api import object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please", "or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps =", "detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi, deviation) # counting all the objects", "1550 height = 1028 is_color_recognition_enabled = 0 roi = 430 deviation = 10", "height = 1028 is_color_recognition_enabled = 0 roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video,", "= 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi,", "category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550 height = 1028 is_color_recognition_enabled", 
"backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550 height = 1028 is_color_recognition_enabled = 0", "< '1.4.0': raise ImportError('Please upgrade your tensorflow installation to v1.4.* or later!') input_video", "v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps", "import tensorflow as tf from utils import backbone from api import object_counting_api if", "raise ImportError('Please upgrade your tensorflow installation to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\"", "fps = 80 width = 1550 height = 1028 is_color_recognition_enabled = 0 roi", "80 width = 1550 height = 1028 is_color_recognition_enabled = 0 roi = 430", "\"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550", "# input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550 height", "from utils import backbone from api import object_counting_api if tf.__version__ < '1.4.0': raise", "0 roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width,", "= backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550 height = 1028 is_color_recognition_enabled =", "object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi, deviation) # counting all the", "backbone from api import object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please 
upgrade your", "= 1028 is_color_recognition_enabled = 0 roi = 430 deviation = 10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph,", "input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550 height =", "installation to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index =", "import object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please upgrade your tensorflow installation to", "from api import object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please upgrade your tensorflow", "if tf.__version__ < '1.4.0': raise ImportError('Please upgrade your tensorflow installation to v1.4.* or", "detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width = 1550 height = 1028", "10 object_counting_api.cumulative_object_counting_y_axis(input_video, detection_graph, category_index, is_color_recognition_enabled, fps, width, height, roi, deviation) # counting all", "<filename>src/Vehicle_detection.py import tensorflow as tf from utils import backbone from api import object_counting_api", "later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80", "= \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17') fps = 80 width =", "object_counting_api if tf.__version__ < '1.4.0': raise ImportError('Please upgrade your tensorflow installation to v1.4.*", "ImportError('Please 
upgrade your tensorflow installation to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" #", "upgrade your tensorflow installation to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\"", "to v1.4.* or later!') input_video = \"./input_footage/trim.mp4\" # input_video=\"rtsp://admin:admin@555@192.168.1.108/cam/realmonitor?channel=1&subtype=1\" detection_graph, category_index = backbone.set_model('ssd_mobilenet_v1_coco_2017_11_17')" ]
[]
[ "IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS:", "as session: async with session.put( str(url), headers=headers, data=content ) as response: response_content =", "PORT)) if wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen", "__str__(self) -> str: \"\"\"Return a readable representation of the gateway.\"\"\" return ( \"%s", "= 8080 self.set_config() async def reboot(self, update_config: bool, timeout: int = 30) ->", "None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size =", "= True self._use_proxy = True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config()", "(id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask:", "#: Set a new configuration. Gateway takes a few seconds to do the", "from resposes.\"\"\" result = [] discovered = [] loop = asyncio.get_event_loop() sock =", "def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new sensor object for given ID.\"\"\"", "Sensor: \"\"\"Create new sensor object for given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result)", "from ipaddress import IPv4Address import aiohttp from multidict import CIMultiDictProxy from yarl import", "-> None: self._use_proxy = value @property def proxy(self) -> str: return str(self._proxy) @proxy.setter", "= self._fixed_dns.packed sock = Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def", "= \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway =", "the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes) -> bool:", "-> None: \"\"\"Detachs the gateway from the 
proxy and restore original settings.\"\"\" if", "0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos =", "orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] packet[175 - len(str_bytes) :", "Optional[SensorHandler] = None self._version = \"1.50\" self._last_seen: Optional[float] = None self._attached = False", "180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy", "-> Sensor: \"\"\"Create new sensor object for given ID.\"\"\" result = Sensor(self, sensor_id)", "command UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10) return packet", "async def discover( local_ip_address: Optional[str] = None, timeout: int = 2, ) ->", "def orig_proxy(self) -> str: return str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port)", ": 3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos", "int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port", "str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return", "bool = False, timeout: int = 2 ) -> Optional[bytes]: \"\"\"Sends command and", "IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos", "all communication with the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port", "if self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached =", "of the gateway to default values.\"\"\" 
self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip", "1024: raise ValueError(\"Invalid proxy port number\") self._proxy_port = value @property def fixed_dns(self) ->", "self._last_seen = time.time() self._initialized = True return result async def update_config(self, timeout: int", "loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet,", "None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str) -> Sensor:", "self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs", "self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config = await", "await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config else: return None finally:", "self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False self.proxy", "objects created from resposes.\"\"\" result = [] discovered = [] loop = asyncio.get_event_loop()", "= None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict() self._initialized = False", "%s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data to", "= bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\")", "\"big\")) ) self._last_seen = time.time() def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor", "and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes) -> bool: \"\"\"Parses", "def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend", "used by 
the gateway for comunnications PORT = 8003 # Commands which acceps", "socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock", "str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) : 44] = str_bytes", "session: async with session.put( str(url), headers=headers, data=content ) as response: response_content = await", "def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) -> None:", "= None self._fixed_gateway: Any = None self._name: Any = None self._server: Any =", "self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos +", "@name.setter def name(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 20: raise", "< 0 or value >= 64 * 1024: raise ValueError(\"Invalid proxy port number\")", "def init( self, config: Optional[bytes] = None, ) -> None: if config is", "0 packages_len = len(packages) while pos + 64 <= packages_len: await self.handle_sensor_update( packages[pos", "+= 64 async def handle_update(self, code: str, packages: bytes) -> None: \"\"\"Handle update", "None finally: sock.close() async def get_config(self, timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains", "IPv4Address import aiohttp from multidict import CIMultiDictProxy from yarl import URL from .sensor", "\"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, self.__class__.__qualname__,", "package_checksum: int) -> None: \"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug( \"Update package", "str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)] = str_bytes str_bytes =", "\"1.50\" self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy: Any = None self._orig_proxy:", "def 
check_config(config: bytes) -> bool: return ( config is not None and (len(config)", "time.time() return config else: return None finally: sock.close() async def get_config(self, timeout: int", "None: self.parse_config(config) def _check_init(self) -> None: if not self._initialized: raise Exception(\"Gateway is not", "self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0 self._fixed_ip =", "ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180])", "= True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await self.get_config()", "packet_size = 181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id", "self, config: Optional[bytes] = None, ) -> None: if config is None: config", "proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) -> None: if", "None: config = await self.get_config() if config is not None: self.parse_config(config) def _check_init(self)", "str_bytes[3 : 3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ]", "Find any available gateway in the local network FIND_GATEWAY = 2 #: Find", "2 ) -> Optional[bytes]: \"\"\"Sends command and optional data to the gateway.\"\"\" packet", "= config.find(0, 49, 114) if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos", "def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package)", "= self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result:", "return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) -> None: self._fixed_dns = IPv4Address(value) @property", "= None self._use_proxy: Any = None self._proxy: Any = None 
self._proxy_port: Any =", "= await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config):", "Callable[[Sensor], Awaitable[None]] #: all communication with the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\"", "bytes, ) -> None: \"\"\"Resend gateway's PUT request to cloud server.\"\"\" if self._send_data_to_cloud:", "= None self._version = \"1.50\" self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy:", "Optional[str] = None, timeout: int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet", "formal representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \"", "\"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed", "orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size = 0", "= orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True return", "None, ) -> None: if config is None: config = await self.get_config() if", "@property def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) ->", "\"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def add_sensor(self, sensor:", "None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None", "None: \"\"\"Reset configuration of the gateway to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp", "raise ValueError(\"Server address is too long\") self._server = value @property def use_proxy(self) ->", "struct.pack(\">H6sH\", command, gateway_id, 10) return packet async def send_command( self, command: int, wait_for_result:", "Optional[str], ) -> 
socket.socket: \"\"\"Prepares UDP socket to comunicate with the gateway.\"\"\" sock", "- orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3 + orig_part_size] = orig_data[ orig_data_pos", "3 + len(orig_name_bytes) else: orig_data_size = 0 orig_data = bytearray(orig_data_size) if orig_data_size >", "\" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \"", "self._proxy self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy = True self._proxy = IPv4Address(proxy)", "self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from the proxy and restore", "takes about 10s for the gateway to be back up again ORIG_PROXY_BYTE1 =", "\"\"\"Handle update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif code == \"C0\": await", "def fixed_netmask(self, value: str) -> None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) ->", "url: URL, headers: CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend gateway's PUT request", "self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen =", "bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 -", "self._attached @property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool)", "ValueError(\"Proxy server address is too long\") self._proxy = value @property def proxy_port(self) ->", "self._use_dhcp = config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway =", ") except Exception as e: _LOGGER.error(\"Error resending request to cloud: %r\", e) @property", "int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP socket to 
comunicate with the", "self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached = False self._orig_use_proxy = None self._orig_proxy = None", "version(self) -> str: return self._version @property def last_seen(self) -> Optional[float]: return self._last_seen @property", "Any = None self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor]", "self.add_sensor(result) return result def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor object for", "packages: bytes) -> None: \"\"\"Handle update packet for few sensors.\"\"\" pos = 0", "None: self._use_proxy = value @property def proxy(self) -> str: return str(self._proxy) @proxy.setter def", "= 1 #: Find any available gateway in the local network FIND_GATEWAY =", "orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen", "self._id[3:6].hex().upper() @property def version(self) -> str: return self._version @property def last_seen(self) -> Optional[float]:", "#1 to mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte", "to be back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to", "request to cloud server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session: async", "def server(self) -> str: return str(self._server) @server.setter def server(self, value: str) -> None:", "time from ipaddress import IPv4Address import aiohttp from multidict import CIMultiDictProxy from yarl", "bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10]", "sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares", "self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy = orig_data[0] 
self._orig_proxy_port = int.from_bytes(orig_data[1:3],", "hex(package_checksum), ) checksum = 0 for b in package: checksum += b checksum", "def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return str(self._orig_proxy)", "def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return bool(self._use_dhcp)", "\"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy,", "config is None: config = await self.get_config() if config is not None: self.parse_config(config)", "\"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use", "1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0))", "from the gateway.\"\"\" result = self.check_config(config) and ( (self._id is None) or (self._id", "115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and", "not exists.\"\"\" result = self._sensors.get(sensor_id, None) if not result: result = self.create_sensor(sensor_id) return", "-> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] =", "256), timeout) except socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8]", "UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10) return packet async", "time.time() def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor", "self._orig_proxy_port: Any = None self._dhcp_ip: Any = None self._use_dhcp: Any = None self._fixed_ip:", "= None self._orig_proxy = None self._orig_proxy_port 
= None self.set_handler(None) self.set_config() def handle_bootup_update(self, package:", "packet[44 : 44 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if", "to the proxy to read measuremnts. Existing proxy settings will be preserved \"\"\"", "= None self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] =", "Sensor] = dict() self._initialized = False async def init( self, config: Optional[bytes] =", "return sock @staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP", "PORT)) while True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break", ") as response: response_content = await response.content.read() _LOGGER.debug( \"Cloud response status: %s content:", "orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3 + orig_part_size] = orig_data[ orig_data_pos :", "value: bool) -> None: self._use_dhcp = value @property def fixed_ip(self) -> str: return", "IPv4Address(value) @property def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str)", "package: bytes) -> None: \"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package) == 15)", "be back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to mark", "self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached", "str) -> None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) -> str: return str(self._fixed_gateway)", "if self.last_seen is not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask,", "orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size = 181 packet = bytearray(packet_size) packet[0:2]", "False async def init( self, config: Optional[bytes] = None, ) -> None: 
if", "= value @property def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def use_dhcp(self) ->", "\"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud", "return \"80\" + self._id[3:6].hex().upper() @property def version(self) -> str: return self._version @property def", "self._fixed_gateway: Any = None self._name: Any = None self._server: Any = None self._use_proxy:", "None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server address is too long\")", "DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port:", "+ str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def add_sensor(self, sensor: Sensor) -> None:", "- len(str_bytes) : 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed", "if orig_data_size > 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] =", "-> None: if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is too long\") self._name", "\"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r,", "timeout: int = 2) -> bool: \"\"\"Updates configuration from the gateway.\"\"\" config =", "self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes)", "self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)]", "orig_data.extend(config[str_end_pos + 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0", "continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return result", 
"gateway_id(self) -> str: return self._id.hex().upper() @property def serial(self) -> str: return \"80\" +", "str) -> Sensor: \"\"\"Create new sensor object for given ID.\"\"\" result = Sensor(self,", "def fixed_gateway(self, value: str) -> None: self._fixed_gateway = IPv4Address(value) @property def name(self) ->", "256), timeout) self._last_seen = time.time() return config else: return None finally: sock.close() async", "-> None: self._send_data_to_cloud = value @property def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property", "class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address: Optional[str]", "= asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config", "@property def orig_proxy(self) -> str: return str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return", "str) -> None: if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is too long\")", "finally: sock.close() async def get_config(self, timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains configuration", "resposes.\"\"\" result = [] discovered = [] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout,", "# await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from the proxy", "a readable representation of the gateway.\"\"\" return ( \"%s V%s, SerialNo: %s (id:", "if code == \"00\": self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error(", "= bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] =", "-> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return str(self._orig_proxy) @property def", "str: \"\"\"Return a readable representation of the gateway.\"\"\" return ( \"%s V%s, SerialNo:", "= await response.content.read() 
_LOGGER.debug( \"Cloud response status: %s content: %s\", response.status, response_content.hex().upper(), )", ") % ( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip,", "sensor def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new sensor object for given", "gateway's bootup update packet.\"\"\" if (len(package) == 15) and (package[5:11] == self._id): _LOGGER.debug(", "-> None: if config is None: config = await self.get_config() if config is", "def set_handler( self, handler: Optional[SensorHandler], ) -> None: self._handler = handler def attach_to_proxy(", "== ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 :", "orig_part_size packet[109 - len(str_bytes) : 109] = str_bytes packet[109] = self._use_proxy str_bytes =", "timeout) @staticmethod def check_config(config: bytes) -> bool: return ( config is not None", "<= packages_len: await self.handle_sensor_update( packages[pos : pos + 63], packages[pos + 63] )", "List, Optional import asyncio import logging import socket import struct import time from", "self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if", "last_seen(self) -> Optional[float]: return self._last_seen @property def attached(self) -> bool: return self._attached @property", "= IPv4Address(value) @property def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value:", "gateway_id in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally:", "if result: orig_data = bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp =", "@property def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) ->", "def 
last_seen(self) -> Optional[float]: return self._last_seen @property def attached(self) -> bool: return self._attached", "proxy(self) -> str: return str(self._proxy) @proxy.setter def proxy(self, value: str) -> None: if", "bootup update packet.\"\"\" if (len(package) == 15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway", "None, timeout: int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld", "config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] ==", "self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos = config.find(0, 115, 180)", "else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy,", "int = 2 ) -> Optional[bytes]: \"\"\"Sends command and optional data to the", "= False self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port = None self.set_handler(None) self.set_config()", "str(self._proxy) @proxy.setter def proxy(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64:", "@fixed_dns.setter def fixed_dns(self, value: str) -> None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self)", "await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from the proxy and", "= config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] ==", "session.put( str(url), headers=headers, data=content ) as response: response_content = await response.content.read() _LOGGER.debug( \"Cloud", "the gateway.\"\"\" result = self.check_config(config) and ( (self._id is None) or (self._id ==", "socket import struct import time from ipaddress import IPv4Address import aiohttp from multidict", "self._server: Any = None self._use_proxy: Any = None self._proxy: Any = None 
self._proxy_port:", "new configuration. Gateway takes a few seconds to do the update REBOOT =", "bytes) -> None: \"\"\"Handle update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif code", "0)) return sock @staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares command", "get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor object for given ID, creates the", "== 15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")),", "\"utf-8\")) > 20: raise ValueError(\"Name is too long\") self._name = value @property def", "orig_data = bytearray(orig_data_size) if orig_data_size > 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2,", "str, local_ip_address: Optional[str] = None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address:", "orig_data_pos += orig_part_size packet[109 - len(str_bytes) : 109] = str_bytes packet[109] = self._use_proxy", "175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1,", "%s\", code, packages.hex().upper(), ) async def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content:", "discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return", "result = Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id: str) -> Sensor:", "None self._dhcp_ip: Any = None self._use_dhcp: Any = None self._fixed_ip: Any = None", "await gateway.init(config) result.append(gateway) finally: sock.close() return result def set_handler( self, handler: Optional[SensorHandler], )", "(len(package) == 15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", 
time.ctime(int.from_bytes(package[1:5],", "self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return a readable representation of", "to cloud: %r\", e) @property def gateway_id(self) -> str: return self._id.hex().upper() @property def", "return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) -> None: self._use_proxy = value @property", "\" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \"", "Any = None self._fixed_ip: Any = None self._fixed_netmask: Any = None self._fixed_gateway: Any", "\"\"\"Reset configuration of the gateway to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp =", "-> None: self._use_dhcp = value @property def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter", "handler: Optional[SensorHandler], ) -> None: self._handler = handler def attach_to_proxy( self, proxy: str,", "config = await self.get_config() if config is not None: self.parse_config(config) def _check_init(self) ->", "fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) -> None: self._fixed_gateway", "number\") self._proxy_port = value @property def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def", "2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway objects created from", "-> str: return self._id.hex().upper() @property def serial(self) -> str: return \"80\" + self._id[3:6].hex().upper()", "URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #:", "None self._fixed_gateway: Any = None self._name: Any = None self._server: Any = None", "ValueError(\"Name is too long\") self._name = value @property def server(self) -> str: return", "update code %d, data %s\", code, packages.hex().upper(), ) async def resend_data_to_cloud( self, url:", 
"internet gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address: Optional[str] = None, ) ->", "return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) -> None: self._fixed_ip = IPv4Address(value) @property", "%s\" ) % ( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\",", "with the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used by", "config is not None: self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str] = None,", "int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) -> None: if value <", "sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command:", "len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1]", "= [] discovered = [] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet", "None: self._send_data_to_cloud = value @property def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def", "gateway from the proxy and restore original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy", "len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server address is too long\") self._proxy =", "self.parse_config(config) def _check_init(self) -> None: if not self._initialized: raise Exception(\"Gateway is not initialized\")", "@property def last_seen(self) -> Optional[float]: return self._last_seen @property def attached(self) -> bool: return", "): orig_data.extend(config[str_end_pos + 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] !=", "address is too long\") self._server = value @property def use_proxy(self) -> bool: return", "yarl import URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__) 
SensorHandler = Callable[[Sensor],", "Any = None self._proxy_port: Any = None self._fixed_dns: Any = None self._send_data_to_cloud =", "\"\"\"Handle update packet for few sensors.\"\"\" pos = 0 packages_len = len(packages) while", "command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15]", "__init__( self, gateway_id: str, local_ip_address: Optional[str] = None, ) -> None: self._id: bytes", "given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id: str)", "ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id: str) ->", "self.server = \"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config()", "a formal representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r,", "-> str: return \"80\" + self._id[3:6].hex().upper() @property def version(self) -> str: return self._version", "dict() self._initialized = False async def init( self, config: Optional[bytes] = None, )", "= str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1, self._local_ip_address)", "value @property def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool)", "- len(str_bytes) : 109] = str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\")", "True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\"", "= bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)] = str_bytes str_bytes = bytes(21", ") -> None: \"\"\"Resend gateway's PUT request to cloud server.\"\"\" if self._send_data_to_cloud: try:", "name(self) -> str: return str(self._name) 
@name.setter def name(self, value: str) -> None: if", "%s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15],", "= IPv4Address(value) @property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) ->", "self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] =", "-> None: \"\"\"Resend gateway's PUT request to cloud server.\"\"\" if self._send_data_to_cloud: try: async", "self.set_config() # await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from the", "config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1", "value: str) -> None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) -> bool: return", "to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask", "-> str: return str(self._proxy) @proxy.setter def proxy(self, value: str) -> None: if len(bytes(value,", "> 64: raise ValueError(\"Server address is too long\") self._server = value @property def", "def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) -> None:", "self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return a readable", "None self._proxy: Any = None self._proxy_port: Any = None self._fixed_dns: Any = None", "self._initialized = False async def init( self, config: Optional[bytes] = None, ) ->", "= 5 #: A reboot takes about 10s for the gateway to be", "int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized", "= 
IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self) ->", "packages: bytes) -> None: \"\"\"Handle update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif", "config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos = config.find(0, 115, 180) self._proxy =", "None: \"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\",", "return False def set_config(self) -> None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init() command", "def name(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name", "config = await self.get_config(timeout) if config is not None: return self.parse_config(config) else: return", "loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return", "len(str_bytes)) packet[44 - len(str_bytes) : 44] = str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44", "packages[pos : pos + 63], packages[pos + 63] ) pos += 64 async", "= None self._use_dhcp: Any = None self._fixed_ip: Any = None self._fixed_netmask: Any =", "gateway.\"\"\" return ( \"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP", "#: Request the configuration of the gateway SET_CONFIG = 4 #: Set a", "for few sensors.\"\"\" pos = 0 packages_len = len(packages) while pos + 64", "update_config and config is not None: self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str]", ") -> None: \"\"\"Attachs the gateway to the proxy to read measuremnts. 
Existing", "sock = Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while", "gateway to be back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1", "\"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r,", ") def __str__(self) -> str: \"\"\"Return a readable representation of the gateway.\"\"\" return", "self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud = value @property def", "not None: self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str] = None, timeout: int", "@property def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) ->", "= ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3)", "GET_CONFIG = 3 #: Request the configuration of the gateway SET_CONFIG = 4", "str: return str(self._name) @name.setter def name(self, value: str) -> None: if len(bytes(value, \"utf-8\"))", "@property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return", "is None) or (self._id == config[2:8]) ) if result: orig_data = bytearray() self._id", "self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages: bytes) -> None:", "> 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos", "= [] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6))", "_LOGGER.debug( \"Gateway bootup timestamp %s\", 
time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\"))", "\" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \"", "config = await self.send_command(REBOOT, update_config, timeout) if update_config and config is not None:", "%s\", package.hex().upper(), hex(package_checksum), ) checksum = 0 for b in package: checksum +=", "discover packet and yeld gateway objects created from resposes.\"\"\" result = [] discovered", ") checksum = 0 for b in package: checksum += b checksum &=", "the gateway to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip =", "== package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if", "= None self._dhcp_ip: Any = None self._use_dhcp: Any = None self._fixed_ip: Any =", "= None, timeout: int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and", "and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port", "= 0 packages_len = len(packages) while pos + 64 <= packages_len: await self.handle_sensor_update(", "use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) -> None: self._use_proxy", "update packet.\"\"\" if (len(package) == 15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup", "self._orig_proxy_port = None self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's", "object for given ID, creates the sensor if not exists.\"\"\" result = self._sensors.get(sensor_id,", "+ len(str_bytes)] = str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) :", "= None self._proxy_port: Any = None self._fixed_dns: Any = None self._send_data_to_cloud = True", "and 
config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114]) self._server", "gateway in the local network FIND_GATEWAY = 2 #: Find a single available", "is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else:", "= IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49,", "\"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial,", "return config else: return None finally: sock.close() async def get_config(self, timeout: int =", "= None self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's bootup", "cloud: %s\\n\" \"Last Contact: %s\" ) % ( self.name, self.version, self.serial, self.gateway_id, \"Yes\"", "self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) -> None: \"\"\"Reset configuration", "use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) -> None: self._use_dhcp", "await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d, data %s\", code, packages.hex().upper(), )", "ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns =", "__repr__(self) -> str: \"\"\"Return a formal representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s),", "network FIND_GATEWAY = 2 #: Find a single available gateway in the local", "available gateway in the local network GET_CONFIG = 3 #: Request the configuration", "= orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size packet[109 -", "int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) -> None: if value < 0 or", "be 
preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy", "not None: return self.parse_config(config) else: return False def set_config(self) -> None: \"\"\"Set configuration", "4 #: Set a new configuration. Gateway takes a few seconds to do", "result = self.check_config(config) and ( (self._id is None) or (self._id == config[2:8]) )", "orig_proxy(self) -> str: return str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def", "\" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \"", ") -> None: self._handler = handler def attach_to_proxy( self, proxy: str, proxy_port: int,", "= struct.pack(\">H6sH\", command, gateway_id, 10) return packet async def send_command( self, command: int,", "Awaitable, Callable, Dict, List, Optional import asyncio import logging import socket import struct", "-> None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter", "-> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway objects created from resposes.\"\"\" result", "\"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum = 0 for b", "> 20: raise ValueError(\"Name is too long\") self._name = value @property def server(self)", "gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout)", "return result def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor object for given", "Any = None self._use_proxy: Any = None self._proxy: Any = None self._proxy_port: Any", "\"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" 
\"server=%r,", "import struct import time from ipaddress import IPv4Address import aiohttp from multidict import", "= None self._fixed_netmask: Any = None self._fixed_gateway: Any = None self._name: Any =", "not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server,", "mark preserved original proxy settings class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__(", "ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3 +", "%s\\n\" \"Last Contact: %s\" ) % ( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if", "self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict() self._initialized = False async def", "int = 2) -> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY,", "the gateway and optional update configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout) if", "[] discovered = [] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet =", "packet[109 - len(str_bytes) : 109] = str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy),", "sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else:", "= self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy =", "original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to mark preserved", "bool) -> None: self._send_data_to_cloud = value @property def dhcp_ip(self) -> str: return str(self._dhcp_ip)", "\"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, 
self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version,", "\"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r,", "is None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached =", "None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy is", "representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r,", "\"80\" + self._id[3:6].hex().upper() @property def version(self) -> str: return self._version @property def last_seen(self)", "reboot(self, update_config: bool, timeout: int = 30) -> None: \"\"\"Reboots the gateway and", "3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True return result async", "= command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp", "bytes) -> bool: \"\"\"Parses configuration obtained from the gateway.\"\"\" result = self.check_config(config) and", "to mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2", "\"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data to cloud: %s\\n\" \"Last Contact:", "self.version, time.ctime(self.last_seen) if self.last_seen is not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp,", "the local network FIND_GATEWAY = 2 #: Find a single available gateway in", "packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages) else:", "-> Sensor: \"\"\"Return sensor object for given ID, creates the sensor if not", "packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, 
(BROADCAST_ADDR, PORT)) finally: sock.close()", "configuration of the gateway to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True", ") if result: orig_data = bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp", "str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool)", "= None self._server: Any = None self._use_proxy: Any = None self._proxy: Any =", "local_ip_address self._handler: Optional[SensorHandler] = None self._version = \"1.50\" self._last_seen: Optional[float] = None self._attached", "self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return", "attached(self) -> bool: return self._attached @property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter", "49, 114) if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2]", "None self._orig_proxy_port: Any = None self._dhcp_ip: Any = None self._use_dhcp: Any = None", "@property def proxy(self) -> str: return str(self._proxy) @proxy.setter def proxy(self, value: str) ->", "update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages)", "command: int, wait_for_result: bool = False, timeout: int = 2 ) -> Optional[bytes]:", "+ 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos +", "packages.hex().upper(), ) async def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content: bytes, )", "await self.get_config() if config is not None: self.parse_config(config) def _check_init(self) -> None: if", "bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None self._version = \"1.50\" 
self._last_seen:", "0 str_end_pos = config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos +", "sensors.\"\"\" pos = 0 packages_len = len(packages) while pos + 64 <= packages_len:", "break if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id) gateway", "single available gateway in the local network GET_CONFIG = 3 #: Request the", "= value @property def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value:", "(BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout)", "= config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if (", "return None finally: sock.close() async def get_config(self, timeout: int = 2) -> Optional[bytes]:", "(len(config) >= 186) and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes)", "bytes) -> None: \"\"\"Handle update packet for few sensors.\"\"\" pos = 0 packages_len", "self.get_config(timeout) if config is not None: return self.parse_config(config) else: return False def set_config(self)", "= \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def reboot(self, update_config: bool, timeout: int", "socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in", "SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with the gateways are broadcasts BROADCAST_ADDR", "self._attached = False self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port = None self.set_handler(None)", "return self._last_seen @property def attached(self) -> bool: return self._attached @property def send_data_to_cloud(self) ->", "CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend gateway's PUT request to cloud 
server.\"\"\"", "too long\") self._name = value @property def server(self) -> str: return str(self._server) @server.setter", "len(orig_name_bytes) else: orig_data_size = 0 orig_data = bytearray(orig_data_size) if orig_data_size > 0: orig_data[0]", "\"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen is not", "await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes) -> bool: return ( config", "= self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached = False self._orig_use_proxy = None self._orig_proxy =", "result def set_handler( self, handler: Optional[SensorHandler], ) -> None: self._handler = handler def", "* 1024: raise ValueError(\"Invalid proxy port number\") self._proxy_port = value @property def fixed_dns(self)", "] orig_data_pos += orig_part_size packet[109 - len(str_bytes) : 109] = str_bytes packet[109] =", "initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP", "= self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes =", "-> str: return self._version @property def last_seen(self) -> Optional[float]: return self._last_seen @property def", "(BROADCAST_ADDR, PORT)) while True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout:", "CIMultiDictProxy from yarl import URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler", "gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return result def set_handler(", "= self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size", "-> bytes: \"\"\"Prepares command 
UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id,", "import socket import struct import time from ipaddress import IPv4Address import aiohttp from", "@property def attached(self) -> bool: return self._attached @property def send_data_to_cloud(self) -> bool: return", "\"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(),", "str: return str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self,", "value: str) -> None: if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is too", "_check_init(self) -> None: if not self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod def", "gateway.\"\"\" result = self.check_config(config) and ( (self._id is None) or (self._id == config[2:8])", "handle_update(self, code: str, packages: bytes) -> None: \"\"\"Handle update packets.\"\"\" if code ==", "request to cloud: %r\", e) @property def gateway_id(self) -> str: return self._id.hex().upper() @property", "package %s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum = 0 for b in", "A reboot takes about 10s for the gateway to be back up again", "self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\"", "code == \"00\": self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow", "_LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum = 0 for", "server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session: async with session.put( str(url),", "bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2", "packet = struct.pack(\">H6sH\", command, 
gateway_id, 10) return packet async def send_command( self, command:", "None self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's bootup update", "-> None: \"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum", "63] ) pos += 64 async def handle_update(self, code: str, packages: bytes) ->", "int, wait_for_result: bool = False, timeout: int = 2 ) -> Optional[bytes]: \"\"\"Sends", "( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask,", "% ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not", "= SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size =", "len(str_bytes) : 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock", "\" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \"", "str_bytes = bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) : 44] = str_bytes str_bytes", "= orig_name_bytes orig_data_pos = 0 packet_size = 181 packet = bytearray(packet_size) packet[0:2] =", "self, gateway_id: str, local_ip_address: Optional[str] = None, ) -> None: self._id: bytes =", "= str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) : 44] =", "if self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 +", "= None self._attached = False self._orig_use_proxy: Any = None self._orig_proxy: Any = None", "self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not None else \"never\", self.attached,", "Sensor) -> None: \"\"\"Add sensor object.\"\"\" 
self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str)", "= config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28])", "-> None: \"\"\"Handle update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif code ==", "@property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a", "name(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is", "\"\"\"Parses configuration obtained from the gateway.\"\"\" result = self.check_config(config) and ( (self._id is", "\"Send data to cloud: %s\\n\" \"Last Contact: %s\" ) % ( self.name, self.version,", "else \"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen is", "timeout) self._last_seen = time.time() return config else: return None finally: sock.close() async def", "%s\\n\" \"Proxy Port: %s\\n\" \"Send data to cloud: %s\\n\" \"Last Contact: %s\" )", ": orig_data_pos + orig_part_size ] packet[175 - len(str_bytes) : 175] = str_bytes packet[175:177]", "for one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum", "value @property def server(self) -> str: return str(self._server) @server.setter def server(self, value: str)", "self._handler: Optional[SensorHandler] = None self._version = \"1.50\" self._last_seen: Optional[float] = None self._attached =", "= True return result async def update_config(self, timeout: int = 2) -> bool:", "optional data to the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address)", "None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def", "\" \"orig_proxy=%r, \" 
\"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, self.gateway_id,", "packet for one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum), )", "self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\"", "self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop =", "= local_ip_address self._handler: Optional[SensorHandler] = None self._version = \"1.50\" self._last_seen: Optional[float] = None", "package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages:", "self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's bootup update packet.\"\"\"", "data to the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try:", "value @property def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str)", "bool: \"\"\"Parses configuration obtained from the gateway.\"\"\" result = self.check_config(config) and ( (self._id", "1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock @staticmethod", "\"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r,", "orig_data_pos + orig_part_size ] packet[175 - len(str_bytes) : 175] = str_bytes packet[175:177] =", "def parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration obtained from the gateway.\"\"\" result", "if self.use_proxy else \"No\", self.proxy, 
self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if", "timeout: int = 2 ) -> Optional[bytes]: \"\"\"Sends command and optional data to", "try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except asyncio.TimeoutError: break", "def proxy_port(self, value: int) -> None: if value < 0 or value >=", "def use_dhcp(self, value: bool) -> None: self._use_dhcp = value @property def fixed_ip(self) ->", "bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return str(self._orig_proxy) @property def orig_proxy_port(self)", "def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) -> None:", "port number\") self._proxy_port = value @property def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter", "self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size = 181 packet =", "fixed_gateway(self, value: str) -> None: self._fixed_gateway = IPv4Address(value) @property def name(self) -> str:", "-> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address is too long\")", "send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10) return packet async def send_command( self,", "self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 :", ": pos + 63], packages[pos + 63] ) pos += 64 async def", "-> None: if value < 0 or value >= 64 * 1024: raise", "resending request to cloud: %r\", e) @property def gateway_id(self) -> str: return self._id.hex().upper()", "#: UDP port used by the gateway for comunnications PORT = 8003 #", "if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port", "if orig_data_pos < 
orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size", "Commands which acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find any available", "orig_data = bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15] !=", "%s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\"", "self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)] = str_bytes str_bytes", "checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum = 0 for b in package: checksum", "data to cloud: %s\\n\" \"Last Contact: %s\" ) % ( self.name, self.version, self.serial,", "async def handle_sensor_update(self, package: bytes, package_checksum: int) -> None: \"\"\"Handle update packet for", "Any = None self._orig_proxy_port: Any = None self._dhcp_ip: Any = None self._use_dhcp: Any", "= 2 ) -> Optional[bytes]: \"\"\"Sends command and optional data to the gateway.\"\"\"", "self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port", "response.content.read() _LOGGER.debug( \"Cloud response status: %s content: %s\", response.status, response_content.hex().upper(), ) except Exception", "= 8003 # Commands which acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #:", "seconds to do the update REBOOT = 5 #: A reboot takes about", "self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self,", "from the gateway.\"\"\" config = await self.get_config(timeout) if config is not None: return", "ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3", "+ orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] packet[175 - len(str_bytes)", 
"packet[175 - len(str_bytes) : 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] =", "-> None: \"\"\"Reboots the gateway and optional update configuration.\"\"\" config = await self.send_command(REBOOT,", ") pos += 64 async def handle_update(self, code: str, packages: bytes) -> None:", "= 0x19 #: 'Magic' byte #1 to mark preserved original proxy settings ORIG_PROXY_BYTE2", "sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor object", "and config is not None: self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str] =", "bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size = 0 orig_data =", "async def reboot(self, update_config: bool, timeout: int = 30) -> None: \"\"\"Reboots the", "sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop()", "value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server address", "= None, ) -> None: if config is None: config = await self.get_config()", "= self._sensors.get(sensor_id, None) if not result: result = self.create_sensor(sensor_id) return result async def", "self._proxy = value @property def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self,", "3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos +=", ") def parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration obtained from the gateway.\"\"\"", "Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def", "config: Optional[bytes] = None, ) -> None: if config is None: config =", "= 0 orig_data = bytearray(orig_data_size) if orig_data_size > 0: orig_data[0] = 
self._orig_use_proxy orig_data[1:3]", "config[2:8]) ) if result: orig_data = bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15])", "from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes) ->", "-> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) -> None: self._use_proxy =", "to cloud: %s\\n\" \"Last Contact: %s\" ) % ( self.name, self.version, self.serial, self.gateway_id,", "\"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if", "\"\"\"Return sensor object for given ID, creates the sensor if not exists.\"\"\" result", "and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version", "gateway's PUT request to cloud server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as", "if wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen =", "cloud server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session: async with session.put(", "while True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except", ": 3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] packet[175", "int, gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP packet to send.\"\"\" packet =", "packages_len = len(packages) while pos + 64 <= packages_len: await self.handle_sensor_update( packages[pos :", "def __init__( self, gateway_id: str, local_ip_address: Optional[str] = None, ) -> None: self._id:", "str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) -> None: self._fixed_netmask = IPv4Address(value)", "False def 
set_config(self) -> None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init() command =", "None: \"\"\"Detachs the gateway from the proxy and restore original settings.\"\"\" if self._attached:", "def prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP packet to send.\"\"\"", "sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return", "response: response_content = await response.content.read() _LOGGER.debug( \"Cloud response status: %s content: %s\", response.status,", "\"\"\"Attachs the gateway to the proxy to read measuremnts. Existing proxy settings will", "REBOOT = 5 #: A reboot takes about 10s for the gateway to", "update configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout) if update_config and config is", "typing import Any, Awaitable, Callable, Dict, List, Optional import asyncio import logging import", "= None self._orig_proxy_port: Any = None self._dhcp_ip: Any = None self._use_dhcp: Any =", "self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached = False self._orig_use_proxy = None self._orig_proxy", "orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size packet[109 - len(str_bytes) : 109] =", "else: return False def set_config(self) -> None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init()", "\"big\")) ) def parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration obtained from the", "the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR,", "handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package) ==", "self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, 
self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not None else", "e) @property def gateway_id(self) -> str: return self._id.hex().upper() @property def serial(self) -> str:", "config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos", "back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to mark preserved", "proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway", "packet and yeld gateway objects created from resposes.\"\"\" result = [] discovered =", "self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None self._version", "is not None and (len(config) >= 186) and (len(config) == int.from_bytes(config[8:10], \"big\")) )", "timeout: int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway", "measuremnts. Existing proxy settings will be preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy", "configuration. 
Gateway takes a few seconds to do the update REBOOT = 5", "packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet,", "result: orig_data = bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15]", "sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command: int, gateway_id: bytes)", "\"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\"", "len(str_bytes) : 44] = str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44 +", "= IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24])", "if config is not None: return self.parse_config(config) else: return False def set_config(self) ->", "< orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos,", "creates the sensor if not exists.\"\"\" result = self._sensors.get(sensor_id, None) if not result:", "% ( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip,", "ipaddress import IPv4Address import aiohttp from multidict import CIMultiDictProxy from yarl import URL", "takes a few seconds to do the update REBOOT = 5 #: A", "if not result: result = self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package: bytes,", "+ 64 <= packages_len: await self.handle_sensor_update( packages[pos : pos + 63], packages[pos +", "self._sensors: Dict[str, Sensor] = dict() self._initialized = False async def init( self, config:", "%s\", response.status, response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error resending request to cloud:", 
"self.get_config() if config is not None: self.parse_config(config) def _check_init(self) -> None: if not", "= None self._name: Any = None self._server: Any = None self._use_proxy: Any =", "use_proxy(self, value: bool) -> None: self._use_proxy = value @property def proxy(self) -> str:", "orig_part_size ] orig_data_pos += orig_part_size packet[109 - len(str_bytes) : 109] = str_bytes packet[109]", "one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum =", "Any = None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict() self._initialized =", "value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address is", "self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from", "None) if not result: result = self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package:", "= bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size = 0 orig_data", "except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in discovered: continue", "packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)] =", "( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ):", "async def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update packet for few sensors.\"\"\"", "config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name", "self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str:", "bool: return ( config is not None and (len(config) >= 186) and 
(len(config)", "None: \"\"\"Resend gateway's PUT request to cloud server.\"\"\" if self._send_data_to_cloud: try: async with", "ValueError(\"Invalid proxy port number\") self._proxy_port = value @property def fixed_dns(self) -> str: return", "- len(str_bytes) : 44] = str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44", ": 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock =", "to the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet,", "not None: self.parse_config(config) def _check_init(self) -> None: if not self._initialized: raise Exception(\"Gateway is", "None self._attached = False self._orig_use_proxy: Any = None self._orig_proxy: Any = None self._orig_proxy_port:", "sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum = 0", "config is not None and (len(config) >= 186) and (len(config) == int.from_bytes(config[8:10], \"big\"))", "the local network GET_CONFIG = 3 #: Request the configuration of the gateway", "fixed_netmask(self, value: str) -> None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) -> str:", "len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is too long\") self._name = value @property", "\"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r,", "packet[110 : 110 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if", "int: return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a formal representation of the", "!= 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name =", "pos + 64 <= packages_len: await self.handle_sensor_update( packages[pos : pos 
+ 63], packages[pos", "Sensor: \"\"\"Return sensor object for given ID, creates the sensor if not exists.\"\"\"", "\"Last Contact: %s\" ) % ( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp", "asyncio import logging import socket import struct import time from ipaddress import IPv4Address", "+ \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def add_sensor(self, sensor: Sensor)", "self._proxy_port = value @property def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self,", "self._use_proxy = config[114] != 0 str_end_pos = config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\")", "%s content: %s\", response.status, response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error resending request", "gateway and optional update configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout) if update_config", "return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a formal representation of the gateway.\"\"\"", "2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\")", "self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)] = str_bytes str_bytes", "value: str) -> None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) -> str: return", "int, handler: SensorHandler, ) -> None: \"\"\"Attachs the gateway to the proxy to", "@property def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) ->", "communication with the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used", "result.append(gateway) finally: sock.close() return result def set_handler( self, handler: Optional[SensorHandler], ) -> None:", "with session.put( str(url), headers=headers, data=content ) as response: 
response_content = await response.content.read() _LOGGER.debug(", "\"\"\"Return a readable representation of the gateway.\"\"\" return ( \"%s V%s, SerialNo: %s", "import time from ipaddress import IPv4Address import aiohttp from multidict import CIMultiDictProxy from", "str: return self._id.hex().upper() @property def serial(self) -> str: return \"80\" + self._id[3:6].hex().upper() @property", "+ orig_part_size ] orig_data_pos += orig_part_size packet[109 - len(str_bytes) : 109] = str_bytes", "= self._proxy_port self._attached = True self._use_proxy = True self._proxy = IPv4Address(proxy) self._proxy_port =", "\"\"\"Sends command and optional data to the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock", "None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def", "self._sensors.get(sensor_id, None) if not result: result = self.create_sensor(sensor_id) return result async def handle_sensor_update(self,", "broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used by the gateway for comunnications", ": 44 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos", "bytearray(orig_data_size) if orig_data_size > 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size]", "Exception as e: _LOGGER.error(\"Error resending request to cloud: %r\", e) @property def gateway_id(self)", "Any, Awaitable, Callable, Dict, List, Optional import asyncio import logging import socket import", "package: checksum += b checksum &= 0x7F if checksum == package_checksum: self._last_seen =", "\"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r,", "orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size packet[109 - len(str_bytes) :", "return str(self._name) @name.setter def 
name(self, value: str) -> None: if len(bytes(value, \"utf-8\")) >", "UDP socket to comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,", "return ( \"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP:", "packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] =", "str(self._name) @name.setter def name(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 20:", "ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to mark preserved original proxy settings", "orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3 + orig_part_size]", "config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port =", "self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\" if", "to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10) return packet async def send_command(", "sensor object for given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return result def", "Any = None self._name: Any = None self._server: Any = None self._use_proxy: Any", "self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port,", "[] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try:", "result = self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package: bytes, package_checksum: int) ->", "%s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data to cloud: %s\\n\" \"Last", "orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 
orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes)", "Any = None self._fixed_netmask: Any = None self._fixed_gateway: Any = None self._name: Any", "command, gateway_id, 10) return packet async def send_command( self, command: int, wait_for_result: bool", "self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\")", "config.find(0, 49, 114) if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos +", "( config is not None and (len(config) >= 186) and (len(config) == int.from_bytes(config[8:10],", "and optional update configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout) if update_config and", "to the gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes", "from .sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all", "self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package: bytes, package_checksum: int) -> None: \"\"\"Handle", "self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new sensor object", "value @property def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def use_dhcp(self) -> bool:", "= False self._orig_use_proxy: Any = None self._orig_proxy: Any = None self._orig_proxy_port: Any =", "config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask", "packages_len: await self.handle_sensor_update( packages[pos : pos + 63], packages[pos + 63] ) pos", "of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \"", "orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] packet[175 - len(str_bytes) : 175] =", "\"utf-8\") packet[44 : 44 + 
len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes))", "BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used by the gateway for comunnications PORT", "\"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port,", "def handle_update(self, code: str, packages: bytes) -> None: \"\"\"Handle update packets.\"\"\" if code", "timeout) except socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8] if", "created from resposes.\"\"\" result = [] discovered = [] loop = asyncio.get_event_loop() sock", "orig_data_pos = 0 packet_size = 181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\")", "is not None: self.parse_config(config) def _check_init(self) -> None: if not self._initialized: raise Exception(\"Gateway", "checksum &= 0x7F if checksum == package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper()", "URL, headers: CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend gateway's PUT request to", "return packet async def send_command( self, command: int, wait_for_result: bool = False, timeout:", "= time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor)", "\"\"\"Resend gateway's PUT request to cloud server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession()", "def name(self) -> str: return str(self._name) @name.setter def name(self, value: str) -> None:", "= bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0", "gateway.\"\"\" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT))", "packet = 
self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if", "\"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy", "\"utf-8\")) > 64: raise ValueError(\"Server address is too long\") self._server = value @property", "preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to mark", "local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return result def set_handler( self, handler: Optional[SensorHandler],", "len(str_bytes)] = str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) : 44]", "self._version = \"1.50\" self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy: Any =", "3 #: Request the configuration of the gateway SET_CONFIG = 4 #: Set", "server(self) -> str: return str(self._server) @server.setter def server(self, value: str) -> None: if", "wait_for_result: bool = False, timeout: int = 2 ) -> Optional[bytes]: \"\"\"Sends command", "None: self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str] = None, timeout: int =", "self._orig_use_proxy: Any = None self._orig_proxy: Any = None self._orig_proxy_port: Any = None self._dhcp_ip:", "bool: return self._attached @property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self,", "self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def reboot(self,", "== int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration obtained", "\"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) % (", "int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration obtained 
from", "-> int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) -> None: if value", "self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached = False self._orig_use_proxy = None", "return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) -> None: self._fixed_netmask = IPv4Address(value) @property", "@staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP packet to", "value: bool) -> None: self._send_data_to_cloud = value @property def dhcp_ip(self) -> str: return", "ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114])", "@property def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return", "def fixed_ip(self, value: str) -> None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) ->", "self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not None else \"never\", self.attached, self.send_data_to_cloud,", "again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to mark preserved original proxy", "self._send_data_to_cloud = value @property def dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def use_dhcp(self)", "self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway,", "the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False)", "the gateway from the proxy and restore original settings.\"\"\" if self._attached: self._use_proxy =", ": 23 + len(str_bytes)] = str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44 -", "network GET_CONFIG = 3 #: Request the configuration of the gateway 
SET_CONFIG =", "49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1", "parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration obtained from the gateway.\"\"\" result =", "bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256),", "gateway_id, 10) return packet async def send_command( self, command: int, wait_for_result: bool =", "asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id)", "is too long\") self._proxy = value @property def proxy_port(self) -> int: return int(self._proxy_port)", "str) -> Sensor: \"\"\"Return sensor object for given ID, creates the sensor if", "result = self._sensors.get(sensor_id, None) if not result: result = self.create_sensor(sensor_id) return result async", "send_command( self, command: int, wait_for_result: bool = False, timeout: int = 2 )", "add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self,", "= orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True return result async def update_config(self,", "str, proxy_port: int, handler: SensorHandler, ) -> None: \"\"\"Attachs the gateway to the", "handler: SensorHandler, ) -> None: \"\"\"Attachs the gateway to the proxy to read", "+ len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size:", "Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address: Optional[str] =", "= Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config = await", "the update REBOOT = 5 #: A reboot takes about 10s for the", "\"\"\"Obtains configuration from the 
gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config:", "long\") self._proxy = value @property def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def", "handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update packet for few sensors.\"\"\" pos =", "update REBOOT = 5 #: A reboot takes about 10s for the gateway", "return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes) -> bool: return (", "= IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos", "if len(orig_data) > 3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos =", "(self._id is None) or (self._id == config[2:8]) ) if result: orig_data = bytearray()", "\"\"\"Prepares command UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10) return", "> 64: raise ValueError(\"Proxy server address is too long\") self._proxy = value @property", "gateway to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\"", "if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id) gateway =", "MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address: Optional[str] = None, )", "None self._proxy_port: Any = None self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors:", "bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 -", "3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] packet[175 -", "if not exists.\"\"\" result = self._sensors.get(sensor_id, None) if not result: result = self.create_sensor(sensor_id)", "def _check_init(self) -> None: if 
not self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod", "bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\"", "\"Unknnow update code %d, data %s\", code, packages.hex().upper(), ) async def resend_data_to_cloud( self,", "def gateway_id(self) -> str: return self._id.hex().upper() @property def serial(self) -> str: return \"80\"", "of the gateway SET_CONFIG = 4 #: Set a new configuration. Gateway takes", "config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if ( config[str_end_pos + 1]", "set_config(self) -> None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init() command = SET_CONFIG if", "result async def update_config(self, timeout: int = 2) -> bool: \"\"\"Updates configuration from", "self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str],", "False, timeout: int = 2 ) -> Optional[bytes]: \"\"\"Sends command and optional data", "True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except asyncio.TimeoutError:", "-> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) -> None: self._use_dhcp =", "orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a formal representation", "if self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update", "self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict() self._initialized", "= \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port =", "self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy = True self._proxy = IPv4Address(proxy) 
self._proxy_port", "= IPv4Address(value) @property def name(self) -> str: return str(self._name) @name.setter def name(self, value:", "sensor_id: str) -> Sensor: \"\"\"Return sensor object for given ID, creates the sensor", "ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114]", "status: %s content: %s\", response.status, response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error resending", "packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config =", "= await self.get_config() if config is not None: self.parse_config(config) def _check_init(self) -> None:", "time.ctime(self.last_seen) if self.last_seen is not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip,", "str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) -> None: self._fixed_gateway = IPv4Address(value) @property def", "\"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send", "and optional data to the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock = self.prepare_socket(timeout,", "self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0,", "in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close()", "Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return result def set_handler( self, handler:", "the gateway to the proxy to read measuremnts. 
Existing proxy settings will be", "raise ValueError(\"Name is too long\") self._name = value @property def server(self) -> str:", "(len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes) -> bool: \"\"\"Parses configuration", "to cloud server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session: async with", "update_config, timeout) if update_config and config is not None: self.parse_config(config) @staticmethod async def", "-> Optional[bytes]: \"\"\"Sends command and optional data to the gateway.\"\"\" packet = self.prepare_command(command,", "self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns", "64: raise ValueError(\"Server address is too long\") self._server = value @property def use_proxy(self)", "time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\"))", "@fixed_gateway.setter def fixed_gateway(self, value: str) -> None: self._fixed_gateway = IPv4Address(value) @property def name(self)", "str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def add_sensor(self,", "self._check_init() command = SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\")", "if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy", "= self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size = 181 packet", "orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size packet[109 - len(str_bytes)", "= bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: 
Optional[SensorHandler] = None self._version = \"1.50\"", "try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout)", "= proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self) -> None: \"\"\"Detachs the", "packet to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10) return packet async def", "configuration of the gateway SET_CONFIG = 4 #: Set a new configuration. Gateway", "orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True return result async def update_config(self, timeout:", "Optional[float]: return self._last_seen @property def attached(self) -> bool: return self._attached @property def send_data_to_cloud(self)", "-> None: self._fixed_gateway = IPv4Address(value) @property def name(self) -> str: return str(self._name) @name.setter", "bytes) -> bytes: \"\"\"Prepares command UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\", command,", "\"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address: Optional[str] = None,", ": 44] = str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)]", "-> None: self._handler = handler def attach_to_proxy( self, proxy: str, proxy_port: int, handler:", "command = SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size", "= 2) -> bool: \"\"\"Updates configuration from the gateway.\"\"\" config = await self.get_config(timeout)", "%s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed", "discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return result def", "109] = str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 
: 110", "3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) >", "response_content = await response.content.read() _LOGGER.debug( \"Cloud response status: %s content: %s\", response.status, response_content.hex().upper(),", "-> None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter", "\"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\")) +", "aiohttp from multidict import CIMultiDictProxy from yarl import URL from .sensor import Sensor", "-> None: \"\"\"Reset configuration of the gateway to default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\"", "63], packages[pos + 63] ) pos += 64 async def handle_update(self, code: str,", "\"\"\"Set configuration to the gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy is not", "1 #: Find any available gateway in the local network FIND_GATEWAY = 2", "bytes: \"\"\"Prepares command UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\", command, gateway_id, 10)", "config[114] != 0 str_end_pos = config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if (", "None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached = True", "if update_config and config is not None: self.parse_config(config) @staticmethod async def discover( local_ip_address:", "to mark preserved original proxy settings class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def", "for b in package: checksum += b checksum &= 0x7F if checksum ==", "str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) -> None: self._fixed_dns = IPv4Address(value) @property def", "True, timeout) @staticmethod def check_config(config: bytes) -> bool: return ( config 
is not", "= False self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def reboot(self, update_config:", "await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id", "packet[44 - len(str_bytes) : 44] = str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 :", "packet[23 : 23 + len(str_bytes)] = str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44", "self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy = True self._proxy", "- 3) str_bytes[3 : 3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos +", "object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new sensor", "available gateway in the local network FIND_GATEWAY = 2 #: Find a single", "%s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\"", "str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)] = str_bytes str_bytes =", "PORT)) finally: sock.close() def reset_config(self) -> None: \"\"\"Reset configuration of the gateway to", "len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address is too long\") self._server = value", "-> bool: return self._attached @property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def", "by the gateway for comunnications PORT = 8003 # Commands which acceps gateway", "packages[pos + 63] ) pos += 64 async def handle_update(self, code: str, packages:", "None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def", "False self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port = None self.set_handler(None) self.set_config() def", "= 0 for b in package: checksum += b checksum &= 0x7F if", "= 
config[114] != 0 str_end_pos = config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if", "\" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \"", "timeout: int = 30) -> None: \"\"\"Reboots the gateway and optional update configuration.\"\"\"", "\"\"\"Return a formal representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \"", "(package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version =", "def server(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server", "sock.close() return result def set_handler( self, handler: Optional[SensorHandler], ) -> None: self._handler =", "True return result async def update_config(self, timeout: int = 2) -> bool: \"\"\"Updates", "0x74 #: 'Magic' byte #2 to mark preserved original proxy settings class Gateway:", "packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)] =", "181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] =", "config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if ( config[str_end_pos", "self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await", "2 #: Find a single available gateway in the local network GET_CONFIG =", "IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28,", "V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP:", "update packet for one sensor.\"\"\" _LOGGER.debug( \"Update package %s, checksum %s\", package.hex().upper(), hex(package_checksum),", "(self._id == 
config[2:8]) ) if result: orig_data = bytearray() self._id = config[2:8] self._dhcp_ip", "Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], ) ->", "sensor if not exists.\"\"\" result = self._sensors.get(sensor_id, None) if not result: result =", "value: str) -> None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self) -> str: return", "\"utf-8\")) > 64: raise ValueError(\"Proxy server address is too long\") self._proxy = value", "headers=headers, data=content ) as response: response_content = await response.content.read() _LOGGER.debug( \"Cloud response status:", "gateway SET_CONFIG = 4 #: Set a new configuration. Gateway takes a few", "if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2", "None: \"\"\"Handle update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages) elif code == \"C0\":", "self.prepare_command(command, self._id) sock = self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop", "\"\"\"Detachs the gateway from the proxy and restore original settings.\"\"\" if self._attached: self._use_proxy", "= 4 #: Set a new configuration. 
Gateway takes a few seconds to", "result async def handle_sensor_update(self, package: bytes, package_checksum: int) -> None: \"\"\"Handle update packet", "= self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)] = str_bytes", "bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None self._version =", "self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port = None self.set_handler(None) self.set_config() def handle_bootup_update(self,", "reset_config(self) -> None: \"\"\"Reset configuration of the gateway to default values.\"\"\" self.name =", "from typing import Any, Awaitable, Callable, Dict, List, Optional import asyncio import logging", "def set_config(self) -> None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init() command = SET_CONFIG", "server(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address", "self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns,", "IPv4Address(value) @property def name(self) -> str: return str(self._name) @name.setter def name(self, value: str)", "== \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d, data %s\", code,", "3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy", "@property def gateway_id(self) -> str: return self._id.hex().upper() @property def serial(self) -> str: return", "%s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\"", "raise ValueError(\"Proxy server address is too long\") self._proxy = value @property def proxy_port(self)", "Optional[SensorHandler], ) -> None: self._handler = handler def attach_to_proxy( self, proxy: 
str, proxy_port:", "local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command: int, gateway_id:", "\" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \"", "gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP packet to send.\"\"\" packet = struct.pack(\">H6sH\",", "gataway.\"\"\" from typing import Any, Awaitable, Callable, Dict, List, Optional import asyncio import", "self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self,", "with aiohttp.ClientSession() as session: async with session.put( str(url), headers=headers, data=content ) as response:", "return self.parse_config(config) else: return False def set_config(self) -> None: \"\"\"Set configuration to the", "\"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed", "self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes) -> bool: return ( config is", "self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self)", "self._proxy: Any = None self._proxy_port: Any = None self._fixed_dns: Any = None self._send_data_to_cloud", ") % ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is", "local_ip_address: Optional[str] = None, timeout: int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover", "@staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP socket", "\"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" 
\"orig_proxy_port=%r\" \")\" )", "\"\"\"Prepares UDP socket to comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET,", "socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address,", "'Magic' byte #2 to mark preserved original proxy settings class Gateway: \"\"\"Controls MobileAlerts", "self._use_proxy = value @property def proxy(self) -> str: return str(self._proxy) @proxy.setter def proxy(self,", "bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return str(self._orig_proxy) @property def orig_proxy_port(self) -> int:", "ValueError(\"Server address is too long\") self._server = value @property def use_proxy(self) -> bool:", "def __repr__(self) -> str: \"\"\"Return a formal representation of the gateway.\"\"\" return (", "internet gataway.\"\"\" from typing import Any, Awaitable, Callable, Dict, List, Optional import asyncio", "-> Optional[float]: return self._last_seen @property def attached(self) -> bool: return self._attached @property def", "not result: result = self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package: bytes, package_checksum:", "( \"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\"", "import CIMultiDictProxy from yarl import URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__)", "def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def", "self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session: async with session.put( str(url), headers=headers, data=content", "not initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares", 
"self._server = value @property def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self,", "0x19 #: 'Magic' byte #1 to mark preserved original proxy settings ORIG_PROXY_BYTE2 =", "-> str: \"\"\"Return a formal representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \"", "response.status, response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error resending request to cloud: %r\",", "check_config(config: bytes) -> bool: return ( config is not None and (len(config) >=", "the gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes =", "proxy settings will be preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy", "self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self)", "44] = str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)] =", "= ( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time()", "raise ValueError(\"Invalid proxy port number\") self._proxy_port = value @property def fixed_dns(self) -> str:", "= 30) -> None: \"\"\"Reboots the gateway and optional update configuration.\"\"\" config =", "None self._orig_proxy: Any = None self._orig_proxy_port: Any = None self._dhcp_ip: Any = None", "return self._attached @property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value:", "packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\")", "the gateway.\"\"\" return ( \"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\"", "= asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet = 
Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR,", "up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to mark preserved original", "byte #1 to mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic'", "= Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return", "configuration to the gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy is not None:", "orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size = 181", "self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen is not None else", "sensor object for given ID, creates the sensor if not exists.\"\"\" result =", "proxy(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server", "( (self._id is None) or (self._id == config[2:8]) ) if result: orig_data =", "new sensor object for given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return result", "import logging import socket import struct import time from ipaddress import IPv4Address import", "-> str: return str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def", "= IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 : config.find(0,", "\"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen is not None else \"never\",", "sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) -> None: \"\"\"Reset configuration of the", "Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data to cloud: %s\\n\"", "= self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = 
self._orig_proxy_port self._attached = False self._orig_use_proxy =", "= str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 : 110 +", "representation of the gateway.\"\"\" return ( \"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use", "settings will be preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy", "bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) -> None: self._use_dhcp = value", "bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud = value", "-> None: \"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package) == 15) and (package[5:11]", "self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 :", "finally: sock.close() def reset_config(self) -> None: \"\"\"Reset configuration of the gateway to default", "checksum == package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package)", "\"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\"", "+ 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\")", "await self.get_config(timeout) if config is not None: return self.parse_config(config) else: return False def", "self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else", "def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud = value @property def dhcp_ip(self) ->", "int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway objects", "readable 
representation of the gateway.\"\"\" return ( \"%s V%s, SerialNo: %s (id: %s)\\n\"", "IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server:", "#: Find any available gateway in the local network FIND_GATEWAY = 2 #:", "b in package: checksum += b checksum &= 0x7F if checksum == package_checksum:", "packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2,", "= ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3", "self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d, data %s\", code, packages.hex().upper(), ) async", "the proxy and restore original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy =", "b checksum &= 0x7F if checksum == package_checksum: self._last_seen = time.time() sensor_id =", "-> bool: return ( config is not None and (len(config) >= 186) and", ": orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size packet[109 - len(str_bytes) : 109]", "Contact: %s\" ) % ( self.name, self.version, self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else", "struct import time from ipaddress import IPv4Address import aiohttp from multidict import CIMultiDictProxy", "async def handle_update(self, code: str, packages: bytes) -> None: \"\"\"Handle update packets.\"\"\" if", "package.hex().upper(), hex(package_checksum), ) checksum = 0 for b in package: checksum += b", ">= 186) and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes) ->", "configuration from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes)", "self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud", "64 * 1024: raise ValueError(\"Invalid 
proxy port number\") self._proxy_port = value @property def", "str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) -> None: self._fixed_ip = IPv4Address(value) @property def", "self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config =", "%d, data %s\", code, packages.hex().upper(), ) async def resend_data_to_cloud( self, url: URL, headers:", "-> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) -> None: self._fixed_ip =", "= \"1.50\" self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy: Any = None", "or (self._id == config[2:8]) ) if result: orig_data = bytearray() self._id = config[2:8]", "asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config else: return None finally: sock.close()", "proxy and restore original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy", "0 packet_size = 181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] =", "the gateway to be back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte", "str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True", "\" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \"", "to do the update REBOOT = 5 #: A reboot takes about 10s", ") -> socket.socket: \"\"\"Prepares UDP socket to comunicate with the gateway.\"\"\" sock =", "orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True return result", "Request the configuration of the gateway SET_CONFIG = 4 #: Set a new", "None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: 
Optional[str] = local_ip_address self._handler:", "= Callable[[Sensor], Awaitable[None]] #: all communication with the gateways are broadcasts BROADCAST_ADDR =", "\"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed", "Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data to cloud: %s\\n\" \"Last Contact: %s\"", "None: \"\"\"Handle update packet for few sensors.\"\"\" pos = 0 packages_len = len(packages)", "\"\"\"MobileAlerts internet gataway.\"\"\" from typing import Any, Awaitable, Callable, Dict, List, Optional import", "def discover( local_ip_address: Optional[str] = None, timeout: int = 2, ) -> List[\"Gateway\"]:", "logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with the gateways are broadcasts", "-> None: if not self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket(", "= value @property def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value:", "packet async def send_command( self, command: int, wait_for_result: bool = False, timeout: int", "SensorHandler, ) -> None: \"\"\"Attachs the gateway to the proxy to read measuremnts.", "self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached = False", "-> socket.socket: \"\"\"Prepares UDP socket to comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET,", "str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)] = str_bytes str_bytes =", "def reboot(self, update_config: bool, timeout: int = 30) -> None: \"\"\"Reboots the gateway", "SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\"", "default values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask =", "self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = 
\"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy", ": 109] = str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 :", "IPv4Address(value) @property def fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str)", "for given ID, creates the sensor if not exists.\"\"\" result = self._sensors.get(sensor_id, None)", "return self._id.hex().upper() @property def serial(self) -> str: return \"80\" + self._id[3:6].hex().upper() @property def", "local network GET_CONFIG = 3 #: Request the configuration of the gateway SET_CONFIG", "create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new sensor object for given ID.\"\"\" result", "str: \"\"\"Return a formal representation of the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s,", "@property def version(self) -> str: return self._version @property def last_seen(self) -> Optional[float]: return", "self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name,", "= self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR,", "None self._version = \"1.50\" self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy: Any", "gateway.init(config) result.append(gateway) finally: sock.close() return result def set_handler( self, handler: Optional[SensorHandler], ) ->", "asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id =", "async def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content: bytes, ) -> None:", "orig_data_size = 0 orig_data = bytearray(orig_data_size) if orig_data_size > 0: 
orig_data[0] = self._orig_use_proxy", "\"utf-8\") packet[23 : 23 + len(str_bytes)] = str_bytes str_bytes = bytes(21 - len(str_bytes))", "= value @property def proxy(self) -> str: return str(self._proxy) @proxy.setter def proxy(self, value:", "optional update configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout) if update_config and config", "if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session: async with session.put( str(url), headers=headers,", "str: return str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def __repr__(self) ->", "-> str: return str(self._name) @name.setter def name(self, value: str) -> None: if len(bytes(value,", "value < 0 or value >= 64 * 1024: raise ValueError(\"Invalid proxy port", ">= 64 * 1024: raise ValueError(\"Invalid proxy port number\") self._proxy_port = value @property", "= Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True:", "def __str__(self) -> str: \"\"\"Return a readable representation of the gateway.\"\"\" return (", "0 for b in package: checksum += b checksum &= 0x7F if checksum", "+ self._id[3:6].hex().upper() @property def version(self) -> str: return self._version @property def last_seen(self) ->", "is not None: return self.parse_config(config) else: return False def set_config(self) -> None: \"\"\"Set", "except socket.timeout: break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id", "False self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def reboot(self, update_config: bool,", "Any = None self._server: Any = None self._use_proxy: Any = None self._proxy: Any", "None self._name: Any = None self._server: Any = None self._use_proxy: Any = None", "str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") 
packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1, self._local_ip_address) try:", "exists.\"\"\" result = self._sensors.get(sensor_id, None) if not result: result = self.create_sensor(sensor_id) return result", "@property def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) ->", "0x7F if checksum == package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor =", "None: self._fixed_gateway = IPv4Address(value) @property def name(self) -> str: return str(self._name) @name.setter def", "except Exception as e: _LOGGER.error(\"Error resending request to cloud: %r\", e) @property def", "+ 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos", "self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13],", "\"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r,", "return self._version @property def last_seen(self) -> Optional[float]: return self._last_seen @property def attached(self) ->", "sock.close() def reset_config(self) -> None: \"\"\"Reset configuration of the gateway to default values.\"\"\"", "update_config(self, timeout: int = 2) -> bool: \"\"\"Updates configuration from the gateway.\"\"\" config", "self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d,", "return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud = value @property", "bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) -> None: self._use_dhcp = value @property def", "\"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size = 0 orig_data = 
bytearray(orig_data_size)", "( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def", "\"\"\"Updates configuration from the gateway.\"\"\" config = await self.get_config(timeout) if config is not", "if config is None: config = await self.get_config() if config is not None:", "proxy_port: int, handler: SensorHandler, ) -> None: \"\"\"Attachs the gateway to the proxy", "for comunnications PORT = 8003 # Commands which acceps gateway via UDP: DISCOVER_GATEWAYS", "30) -> None: \"\"\"Reboots the gateway and optional update configuration.\"\"\" config = await", "= await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config else: return None", "await self._handler(sensor) async def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update packet for", "@proxy.setter def proxy(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise", "gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy,", "init( self, config: Optional[bytes] = None, ) -> None: if config is None:", "= 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway objects created", "\"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r,", "- len(str_bytes)) packet[44 - len(str_bytes) : 44] = str_bytes str_bytes = bytes(self._server, \"utf-8\")", "ID, creates the sensor if not exists.\"\"\" result = self._sensors.get(sensor_id, None) if not", "= None self._fixed_ip: Any = None self._fixed_netmask: Any = None self._fixed_gateway: Any =", "self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy,", "self._fixed_dns.packed sock = 
Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self)", "code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d, data %s\",", "resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend gateway's", "config else: return None finally: sock.close() async def get_config(self, timeout: int = 2)", "+ orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size", "timeout) if update_config and config is not None: self.parse_config(config) @staticmethod async def discover(", "%r\", e) @property def gateway_id(self) -> str: return self._id.hex().upper() @property def serial(self) ->", "from multidict import CIMultiDictProxy from yarl import URL from .sensor import Sensor _LOGGER", "def update_config(self, timeout: int = 2) -> bool: \"\"\"Updates configuration from the gateway.\"\"\"", "object for given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self,", "\"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def reboot(self, update_config: bool, timeout: int =", "if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server address is too long\") self._proxy", "self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) ->", ") async def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content: bytes, ) ->", "= bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)] = str_bytes str_bytes = bytearray(65", "socket to comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)", "set_handler( self, handler: Optional[SensorHandler], ) -> None: 
self._handler = handler def attach_to_proxy( self,", "-> int: return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a formal representation of", "is too long\") self._name = value @property def server(self) -> str: return str(self._server)", "long\") self._name = value @property def server(self) -> str: return str(self._server) @server.setter def", "= orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] packet[175 - len(str_bytes) : 175]", "is not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns,", "timeout: int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP socket to comunicate with", "@property def server(self) -> str: return str(self._server) @server.setter def server(self, value: str) ->", "\"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\"", "response status: %s content: %s\", response.status, response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error", "async def send_command( self, command: int, wait_for_result: bool = False, timeout: int =", "= None self._orig_proxy_port = None self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) -> None:", "= None self._orig_proxy: Any = None self._orig_proxy_port: Any = None self._dhcp_ip: Any =", "str: return self._version @property def last_seen(self) -> Optional[float]: return self._last_seen @property def attached(self)", "str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server address is", "IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos =", "str(url), headers=headers, data=content ) as response: response_content = await response.content.read() _LOGGER.debug( 
\"Cloud response", "self._orig_use_proxy = orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy =", "self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else", "def attached(self) -> bool: return self._attached @property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud", "a single available gateway in the local network GET_CONFIG = 3 #: Request", ".sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication", "self.proxy_port = 8080 self.set_config() async def reboot(self, update_config: bool, timeout: int = 30)", "bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) -> None: self._use_proxy = value", "\"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package) == 15) and (package[5:11] == self._id):", "\" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__,", "self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not None else \"never\",", "self._orig_proxy: Any = None self._orig_proxy_port: Any = None self._dhcp_ip: Any = None self._use_dhcp:", "fixed_dns(self, value: str) -> None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) -> bool:", "%s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\"", "@property def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) ->", "bool) -> None: self._use_dhcp = value @property def fixed_ip(self) -> str: return str(self._fixed_ip)", "data %s\", code, packages.hex().upper(), ) async def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str],", "-> str: return 
str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) -> None: self._fixed_dns =", "bytes) -> bool: return ( config is not None and (len(config) >= 186)", "PORT = 8003 # Commands which acceps gateway via UDP: DISCOVER_GATEWAYS = 1", "not self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address:", "headers: CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend gateway's PUT request to cloud", "\"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) )", "value @property def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str)", "read measuremnts. Existing proxy settings will be preserved \"\"\" if self._orig_use_proxy is None:", "-> None: \"\"\"Set configuration to the gateway.\"\"\" self._check_init() command = SET_CONFIG if self._orig_use_proxy", "-> str: \"\"\"Return a readable representation of the gateway.\"\"\" return ( \"%s V%s,", "and (len(config) >= 186) and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config:", "\"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r,", "3) str_bytes[3 : 3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size", "orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size = 0 orig_data = bytearray(orig_data_size) if", "import IPv4Address import aiohttp from multidict import CIMultiDictProxy from yarl import URL from", "@fixed_netmask.setter def fixed_netmask(self, value: str) -> None: self._fixed_netmask = IPv4Address(value) @property def fixed_gateway(self)", "fixed_ip(self, value: str) -> None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) -> str:", "gateway to the proxy to read measuremnts. 
Existing proxy settings will be preserved", "= int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time()", "sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle", "\"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port = 8080", "packet for few sensors.\"\"\" pos = 0 packages_len = len(packages) while pos +", "return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return str(self._orig_proxy) @property def orig_proxy_port(self) ->", "23 + len(str_bytes)] = str_bytes str_bytes = bytes(21 - len(str_bytes)) packet[44 - len(str_bytes)", "_LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with the gateways", "self.server, \"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\",", "str_end_pos = config.find(0, 49, 114) if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and", "min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3 + orig_part_size] = orig_data[", "Optional[str] = None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] =", "= 0 packet_size = 181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8]", "config is not None: return self.parse_config(config) else: return False def set_config(self) -> None:", "= await self.send_command(REBOOT, update_config, timeout) if update_config and config is not None: self.parse_config(config)", "_LOGGER.debug( \"Cloud response status: %s content: %s\", response.status, response_content.hex().upper(), ) except Exception as", "async def init( self, config: Optional[bytes] = None, ) -> None: if config", "send_data_to_cloud(self, 
value: bool) -> None: self._send_data_to_cloud = value @property def dhcp_ip(self) -> str:", "timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\" +", "int = 2) -> bool: \"\"\"Updates configuration from the gateway.\"\"\" config = await", "(BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) -> None: \"\"\"Reset configuration of the gateway", "yeld gateway objects created from resposes.\"\"\" result = [] discovered = [] loop", "else: _LOGGER.error( \"Unknnow update code %d, data %s\", code, packages.hex().upper(), ) async def", "= 3 + len(orig_name_bytes) else: orig_data_size = 0 orig_data = bytearray(orig_data_size) if orig_data_size", "\"Yes\" if self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen)", "configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout) if update_config and config is not", "proxy to read measuremnts. 
Existing proxy settings will be preserved \"\"\" if self._orig_use_proxy", "self._last_seen: Optional[float] = None self._attached = False self._orig_use_proxy: Any = None self._orig_proxy: Any", "len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size =", "5 #: A reboot takes about 10s for the gateway to be back", "self._version @property def last_seen(self) -> Optional[float]: return self._last_seen @property def attached(self) -> bool:", "Any = None self._dhcp_ip: Any = None self._use_dhcp: Any = None self._fixed_ip: Any", "Optional[bytes]: \"\"\"Sends command and optional data to the gateway.\"\"\" packet = self.prepare_command(command, self._id)", "ORIG_PROXY_BYTE1 = 0x19 #: 'Magic' byte #1 to mark preserved original proxy settings", "self.set_config() async def reboot(self, update_config: bool, timeout: int = 30) -> None: \"\"\"Reboots", "str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) -", ") -> Optional[bytes]: \"\"\"Sends command and optional data to the gateway.\"\"\" packet =", "10) return packet async def send_command( self, command: int, wait_for_result: bool = False,", "\"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] =", "@fixed_ip.setter def fixed_ip(self, value: str) -> None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self)", "= IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\")", "Callable, Dict, List, Optional import asyncio import logging import socket import struct import", "= 2) -> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True,", "self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, 
self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, )", "bool: \"\"\"Updates configuration from the gateway.\"\"\" config = await self.get_config(timeout) if config is", "SET_CONFIG if self._orig_use_proxy is not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3", "str) -> None: self._fixed_ip = IPv4Address(value) @property def fixed_netmask(self) -> str: return str(self._fixed_netmask)", "self._use_dhcp = value @property def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self,", "] packet[175 - len(str_bytes) : 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\") packet[177:181]", "1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3", "bool, timeout: int = 30) -> None: \"\"\"Reboots the gateway and optional update", "2) -> bool: \"\"\"Updates configuration from the gateway.\"\"\" config = await self.get_config(timeout) if", ") self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen", "2) -> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout)", "sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if", "self, url: URL, headers: CIMultiDictProxy[str], content: bytes, ) -> None: \"\"\"Resend gateway's PUT", "+ 63] ) pos += 64 async def handle_update(self, code: str, packages: bytes)", "0 or value >= 64 * 1024: raise ValueError(\"Invalid proxy port number\") self._proxy_port", "\"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed", "( self.__class__.__module__, self.__class__.__qualname__, self.name, 
self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not None", "= self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23", "config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config)", "def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update packet for few sensors.\"\"\" pos", "about 10s for the gateway to be back up again ORIG_PROXY_BYTE1 = 0x19", "import asyncio import logging import socket import struct import time from ipaddress import", "self._initialized = True return result async def update_config(self, timeout: int = 2) ->", "discover( local_ip_address: Optional[str] = None, timeout: int = 2, ) -> List[\"Gateway\"]: \"\"\"Broadcasts", "\"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r,", "= packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23]", "-> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod", "orig_name_bytes orig_data_pos = 0 packet_size = 181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2,", "code, packages.hex().upper(), ) async def resend_data_to_cloud( self, url: URL, headers: CIMultiDictProxy[str], content: bytes,", "= str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)] = str_bytes", "value: int) -> None: if value < 0 or value >= 64 *", "= value @property def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value:", "async def get_config(self, timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains 
configuration from the", "\" \"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \"", "self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return a readable representation", "and ( (self._id is None) or (self._id == config[2:8]) ) if result: orig_data", "original proxy settings class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id:", "IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway", "@send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud = value @property def dhcp_ip(self)", "3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos =", "the gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" \"attached=%r,", "config[str_end_pos + 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114]) self._server =", "List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway objects created from resposes.\"\"\" result =", "@property def serial(self) -> str: return \"80\" + self._id[3:6].hex().upper() @property def version(self) ->", "break except asyncio.TimeoutError: break if Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in discovered:", "IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def detach_from_proxy(self) -> None:", "Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None self._version = \"1.50\" self._last_seen: Optional[float] =", "self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, 
self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy,", "self._name: Any = None self._server: Any = None self._use_proxy: Any = None self._proxy:", "None: self._use_dhcp = value @property def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def", "prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP socket to comunicate", "orig_part_size] = orig_data[ orig_data_pos : orig_data_pos + orig_part_size ] orig_data_pos += orig_part_size packet[109", "return result async def update_config(self, timeout: int = 2) -> bool: \"\"\"Updates configuration", "= \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False self.proxy =", "if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen is not None else \"never\", )", "self.handle_sensor_update( packages[pos : pos + 63], packages[pos + 63] ) pos += 64", "def reset_config(self) -> None: \"\"\"Reset configuration of the gateway to default values.\"\"\" self.name", "self._attached = False self._orig_use_proxy: Any = None self._orig_proxy: Any = None self._orig_proxy_port: Any", "str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) -> None: self._fixed_ip = IPv4Address(value)", "proxy_port(self, value: int) -> None: if value < 0 or value >= 64", "packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes", "gateway in the local network GET_CONFIG = 3 #: Request the configuration of", "None: \"\"\"Attachs the gateway to the proxy to read measuremnts. 
Existing proxy settings", "self, proxy: str, proxy_port: int, handler: SensorHandler, ) -> None: \"\"\"Attachs the gateway", "too long\") self._server = value @property def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter", "aiohttp.ClientSession() as session: async with session.put( str(url), headers=headers, data=content ) as response: response_content", "\"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data to cloud:", "sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock @staticmethod def", "\"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized =", "int = 30) -> None: \"\"\"Reboots the gateway and optional update configuration.\"\"\" config", "None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy,", "self._last_seen @property def attached(self) -> bool: return self._attached @property def send_data_to_cloud(self) -> bool:", "orig_data_pos : orig_data_pos + orig_part_size ] packet[175 - len(str_bytes) : 175] = str_bytes", "\"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port =", "self._proxy_port = self._orig_proxy_port self._attached = False self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port", "= self.prepare_socket(timeout, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config", "import URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]]", "None self._orig_proxy = None self._orig_proxy_port = None self.set_handler(None) self.set_config() def 
handle_bootup_update(self, package: bytes)", "bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) -> None: self._use_proxy = value @property def", "orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if", "in package: checksum += b checksum &= 0x7F if checksum == package_checksum: self._last_seen", "+= orig_part_size packet[109 - len(str_bytes) : 109] = str_bytes packet[109] = self._use_proxy str_bytes", "IPv4Address(value) @property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str:", "+ len(orig_name_bytes) else: orig_data_size = 0 orig_data = bytearray(orig_data_size) if orig_data_size > 0:", "None: if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is too long\") self._name =", "@staticmethod async def discover( local_ip_address: Optional[str] = None, timeout: int = 2, )", "self._fixed_ip: Any = None self._fixed_netmask: Any = None self._fixed_gateway: Any = None self._name:", "-> None: \"\"\"Handle update packet for few sensors.\"\"\" pos = 0 packages_len =", "not None: orig_name_bytes = bytes(self._orig_proxy, \"utf-8\") orig_data_size = 3 + len(orig_name_bytes) else: orig_data_size", "cloud: %r\", e) @property def gateway_id(self) -> str: return self._id.hex().upper() @property def serial(self)", "\"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size = 181 packet = bytearray(packet_size)", "reboot takes about 10s for the gateway to be back up again ORIG_PROXY_BYTE1", "= package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def handle_sensors_update(self,", "server address is too long\") self._proxy = value @property def proxy_port(self) -> int:", "as response: response_content = await response.content.read() _LOGGER.debug( \"Cloud response status: %s 
content: %s\",", "\"\"\" if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port =", "given ID, creates the sensor if not exists.\"\"\" result = self._sensors.get(sensor_id, None) if", "Awaitable[None]] #: all communication with the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #:", "@use_dhcp.setter def use_dhcp(self, value: bool) -> None: self._use_dhcp = value @property def fixed_ip(self)", "else: sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes:", "str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address is too", "self.use_proxy else \"No\", self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen", "config is not None: self.parse_config(config) def _check_init(self) -> None: if not self._initialized: raise", "len(str_bytes) : 109] = str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110", "%s, checksum %s\", package.hex().upper(), hex(package_checksum), ) checksum = 0 for b in package:", "#: all communication with the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP", "self._handler = handler def attach_to_proxy( self, proxy: str, proxy_port: int, handler: SensorHandler, )", "\".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def add_sensor(self, sensor: Sensor) ->", "\"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy = False", "await self.send_command(REBOOT, update_config, timeout) if update_config and config is not None: self.parse_config(config) @staticmethod", "settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to mark preserved original proxy", "\"big\") packet[177:181] = self._fixed_dns.packed sock = Gateway.prepare_socket(1, 
self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally:", "content: bytes, ) -> None: \"\"\"Resend gateway's PUT request to cloud server.\"\"\" if", "the gateway.\"\"\" config = await self.get_config(timeout) if config is not None: return self.parse_config(config)", "def serial(self) -> str: return \"80\" + self._id[3:6].hex().upper() @property def version(self) -> str:", "e: _LOGGER.error(\"Error resending request to cloud: %r\", e) @property def gateway_id(self) -> str:", "Dict, List, Optional import asyncio import logging import socket import struct import time", "import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with", "sensor_id: str) -> Sensor: \"\"\"Create new sensor object for given ID.\"\"\" result =", "gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find any available gateway in the", "str: return str(self._proxy) @proxy.setter def proxy(self, value: str) -> None: if len(bytes(value, \"utf-8\"))", "str: return str(self._server) @server.setter def server(self, value: str) -> None: if len(bytes(value, \"utf-8\"))", "%s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\"", "= \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\" self.use_proxy =", "port used by the gateway for comunnications PORT = 8003 # Commands which", "str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3 :", "+ 2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port = int.from_bytes(config[180:182],", "\"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create", "self.set_config() def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle gateway's bootup update packet.\"\"\" if", "not None and 
(len(config) >= 186) and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def", "self._dhcp_ip: Any = None self._use_dhcp: Any = None self._fixed_ip: Any = None self._fixed_netmask:", "Optional[float] = None self._attached = False self._orig_use_proxy: Any = None self._orig_proxy: Any =", "= True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns =", "self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy =", "Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor", "-> None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property", "from the proxy and restore original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy", "Port: %s\\n\" \"Send data to cloud: %s\\n\" \"Last Contact: %s\" ) % (", "packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] =", "\" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\"", "handler def attach_to_proxy( self, proxy: str, proxy_port: int, handler: SensorHandler, ) -> None:", "None self._server: Any = None self._use_proxy: Any = None self._proxy: Any = None", "\"\"\"Reboots the gateway and optional update configuration.\"\"\" config = await self.send_command(REBOOT, update_config, timeout)", "few seconds to do the update REBOOT = 5 #: A reboot takes", "value @property def proxy(self) -> str: return str(self._proxy) @proxy.setter def proxy(self, value: str)", "result: result = self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package: bytes, package_checksum: int)", "content: %s\", response.status, 
response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error resending request to", "the gateway SET_CONFIG = 4 #: Set a new configuration. Gateway takes a", "-> bool: \"\"\"Parses configuration obtained from the gateway.\"\"\" result = self.check_config(config) and (", "8080 self.set_config() async def reboot(self, update_config: bool, timeout: int = 30) -> None:", "UDP port used by the gateway for comunnications PORT = 8003 # Commands", "sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id:", "self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway", "the configuration of the gateway SET_CONFIG = 4 #: Set a new configuration.", "!= 0 str_end_pos = config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos", "= Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway) finally: sock.close() return result def set_handler( self,", "self._use_proxy = True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await", "+ 63], packages[pos + 63] ) pos += 64 async def handle_update(self, code:", "is not initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket:", "= socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address:", "gateway for comunnications PORT = 8003 # Commands which acceps gateway via UDP:", "str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) -> None: self._fixed_gateway = IPv4Address(value)", "= config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) 
self._use_dhcp = config[15] != 0 self._fixed_ip = IPv4Address(config[16:20])", "a few seconds to do the update REBOOT = 5 #: A reboot", "async with aiohttp.ClientSession() as session: async with session.put( str(url), headers=headers, data=content ) as", "= \"255.255.255.255\" #: UDP port used by the gateway for comunnications PORT =", "= bytearray(orig_data_size) if orig_data_size > 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\")", "attach_to_proxy( self, proxy: str, proxy_port: int, handler: SensorHandler, ) -> None: \"\"\"Attachs the", "from yarl import URL from .sensor import Sensor _LOGGER = logging.getLogger(__name__) SensorHandler =", "values.\"\"\" self.name = \"MOBILEALERTS-Gateway\" self.use_dhcp = True self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\"", "self._fixed_gateway = IPv4Address(value) @property def name(self) -> str: return str(self._name) @name.setter def name(self,", "in the local network FIND_GATEWAY = 2 #: Find a single available gateway", "str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) -> None: self._fixed_dns = IPv4Address(value)", "data=content ) as response: response_content = await response.content.read() _LOGGER.debug( \"Cloud response status: %s", "True self._use_proxy = True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() #", "_LOGGER.error(\"Error resending request to cloud: %r\", e) @property def gateway_id(self) -> str: return", "gateway_id = config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address)", "if len(bytes(value, \"utf-8\")) > 20: raise ValueError(\"Name is too long\") self._name = value", "20: raise ValueError(\"Name is too long\") self._name = value @property def server(self) ->", "detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from the proxy and restore 
original settings.\"\"\"", "self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return a", "-> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str) ->", "self.fixed_ip = \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server", "in the local network GET_CONFIG = 3 #: Request the configuration of the", "self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask =", "time.time() self._initialized = True return result async def update_config(self, timeout: int = 2)", "is too long\") self._server = value @property def use_proxy(self) -> bool: return bool(self._use_proxy)", "@property def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) ->", "finally: sock.close() return result def set_handler( self, handler: Optional[SensorHandler], ) -> None: self._handler", "== \"00\": self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update", "the gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used by the", "== ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy =", "len(packages) while pos + 64 <= packages_len: await self.handle_sensor_update( packages[pos : pos +", "response_content.hex().upper(), ) except Exception as e: _LOGGER.error(\"Error resending request to cloud: %r\", e)", "\"Cloud response status: %s content: %s\", response.status, response_content.hex().upper(), ) except Exception as e:", "self._proxy_port.to_bytes(2, \"big\") packet[177:181] = self._fixed_dns.packed sock = 
Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT))", "wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time()", "int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a formal representation of the gateway.\"\"\" return", "'Magic' byte #1 to mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #:", "= None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address", "elif code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d, data", "str: return \"80\" + self._id[3:6].hex().upper() @property def version(self) -> str: return self._version @property", "the sensor if not exists.\"\"\" result = self._sensors.get(sensor_id, None) if not result: result", "value >= 64 * 1024: raise ValueError(\"Invalid proxy port number\") self._proxy_port = value", "\"\"\"Create new sensor object for given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return", "Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config = await asyncio.wait_for(loop.sock_recv(sock,", "self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return a readable representation of the gateway.\"\"\"", "None: if config is None: config = await self.get_config() if config is not", "\" \"fixed_gateway=%r, \" \"fixed_dns=%r, \" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \"", "= self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages: bytes) ->", "async def update_config(self, timeout: int = 2) -> bool: \"\"\"Updates configuration from the", "obtained from the gateway.\"\"\" result = self.check_config(config) and ( (self._id is None) or", "bool) -> 
None: self._use_proxy = value @property def proxy(self) -> str: return str(self._proxy)", "long\") self._server = value @property def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def", "\" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r, \" \"fixed_ip=%r, \" \"fixed_netmask=%r, \" \"fixed_gateway=%r, \"", "#: A reboot takes about 10s for the gateway to be back up", "= dict() self._initialized = False async def init( self, config: Optional[bytes] = None,", "proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to mark preserved original", "= bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)] = str_bytes str_bytes = bytearray(65", "are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used by the gateway for", "64 <= packages_len: await self.handle_sensor_update( packages[pos : pos + 63], packages[pos + 63]", "self._orig_proxy = None self._orig_proxy_port = None self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) ->", "sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def", "is None: config = await self.get_config() if config is not None: self.parse_config(config) def", "= value @property def server(self) -> str: return str(self._server) @server.setter def server(self, value:", "Any = None self._fixed_gateway: Any = None self._name: Any = None self._server: Any", "sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256),", "= \"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server =", "code %d, data %s\", code, packages.hex().upper(), ) async def resend_data_to_cloud( self, url: URL,", "int) -> None: if value < 0 or value >= 64 * 1024:", "settings class Gateway: \"\"\"Controls 
MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address:", "config: bytes) -> bool: \"\"\"Parses configuration obtained from the gateway.\"\"\" result = self.check_config(config)", "#: 'Magic' byte #1 to mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74", "else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, \"Yes\" if self.use_proxy else \"No\",", "@use_proxy.setter def use_proxy(self, value: bool) -> None: self._use_proxy = value @property def proxy(self)", "def fixed_dns(self, value: str) -> None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) ->", "return result def set_handler( self, handler: Optional[SensorHandler], ) -> None: self._handler = handler", "value: str) -> None: self._fixed_gateway = IPv4Address(value) @property def name(self) -> str: return", "update packet for few sensors.\"\"\" pos = 0 packages_len = len(packages) while pos", "= self.create_sensor(sensor_id) return result async def handle_sensor_update(self, package: bytes, package_checksum: int) -> None:", "Optional[bytes] = None, ) -> None: if config is None: config = await", "\" \"server=%r, \" \"use_proxy=%r, \" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \"", "self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def reboot(self, update_config: bool, timeout:", "@server.setter def server(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise", "( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \"", "checksum = 0 for b in package: checksum += b checksum &= 0x7F", "= IPv4Address(value) @property def fixed_gateway(self) -> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value:", "114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos = 
config.find(0, 115,", "return str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value:", ": 110 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos", "str, packages: bytes) -> None: \"\"\"Handle update packets.\"\"\" if code == \"00\": self.handle_bootup_update(packages)", "packet[2:8] = self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed", "28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if ( config[str_end_pos + 1] ==", "Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\" \"Send data", "None self._fixed_ip: Any = None self._fixed_netmask: Any = None self._fixed_gateway: Any = None", "== self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), ) self._version = (", "+ orig_part_size ] packet[175 - len(str_bytes) : 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2,", "= await self.get_config(timeout) if config is not None: return self.parse_config(config) else: return False", "pos += 64 async def handle_update(self, code: str, packages: bytes) -> None: \"\"\"Handle", "socket.socket: \"\"\"Prepares UDP socket to comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)", "will be preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy =", "send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud", "result = [] discovered = [] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address)", "str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen = time.time() def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add", "> 3: self._orig_use_proxy = 
orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3)", "mark preserved original proxy settings ORIG_PROXY_BYTE2 = 0x74 #: 'Magic' byte #2 to", "Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) -> None: \"\"\"Reset", "\"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code %d, data %s\", code, packages.hex().upper(),", "sock.close() async def get_config(self, timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains configuration from", "10s for the gateway to be back up again ORIG_PROXY_BYTE1 = 0x19 #:", "None self._use_dhcp: Any = None self._fixed_ip: Any = None self._fixed_netmask: Any = None", "def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) -> None:", "configuration obtained from the gateway.\"\"\" result = self.check_config(config) and ( (self._id is None)", "self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy = True", "result def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor object for given ID,", "return result async def handle_sensor_update(self, package: bytes, package_checksum: int) -> None: \"\"\"Handle update", "Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server:", "FIND_GATEWAY = 2 #: Find a single available gateway in the local network", "_LOGGER.error( \"Unknnow update code %d, data %s\", code, packages.hex().upper(), ) async def resend_data_to_cloud(", "self._use_proxy: Any = None self._proxy: Any = None self._proxy_port: Any = None self._fixed_dns:", "None: return self.parse_config(config) else: return False def set_config(self) -> None: \"\"\"Set configuration to", "value @property def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def 
proxy_port(self, value: int)", "None: if not self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket( timeout:", "186) and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self, config: bytes) -> bool:", "orig_part_size ] packet[175 - len(str_bytes) : 175] = str_bytes packet[175:177] = self._proxy_port.to_bytes(2, \"big\")", "byte #2 to mark preserved original proxy settings class Gateway: \"\"\"Controls MobileAlerts internet", "def use_proxy(self) -> bool: return bool(self._use_proxy) @use_proxy.setter def use_proxy(self, value: bool) -> None:", "orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size = min(orig_data_size -", "= config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos = config.find(0, 115, 180) self._proxy", "def handle_sensor_update(self, package: bytes, package_checksum: int) -> None: \"\"\"Handle update packet for one", "local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP socket to comunicate with the gateway.\"\"\"", "&= 0x7F if checksum == package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor", "): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186])", "= 181 packet = bytearray(packet_size) packet[0:2] = command.to_bytes(2, \"big\") packet[2:8] = self._id packet[8:10]", "str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2]", "serial(self) -> str: return \"80\" + self._id[3:6].hex().upper() @property def version(self) -> str: return", "few sensors.\"\"\" pos = 0 packages_len = len(packages) while pos + 64 <=", "\")\" ) % ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if 
self.last_seen", "+= b checksum &= 0x7F if checksum == package_checksum: self._last_seen = time.time() sensor_id", "sock @staticmethod def prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP packet", "@property def name(self) -> str: return str(self._name) @name.setter def name(self, value: str) ->", "and restore original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port", "= 3 #: Request the configuration of the gateway SET_CONFIG = 4 #:", "15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\", time.ctime(int.from_bytes(package[1:5], \"big\")), )", "def use_proxy(self, value: bool) -> None: self._use_proxy = value @property def proxy(self) ->", "UDP: DISCOVER_GATEWAYS = 1 #: Find any available gateway in the local network", "== ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns", "-> str: return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) -> None: self._fixed_gateway =", "package: bytes, package_checksum: int) -> None: \"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug(", "self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port,", "True self._sensors: Dict[str, Sensor] = dict() self._initialized = False async def init( self,", "None self._use_proxy: Any = None self._proxy: Any = None self._proxy_port: Any = None", "\" \"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen)", ") -> None: if config is None: config = await self.get_config() if config", "Any = None self._use_dhcp: Any = None self._fixed_ip: Any = None 
self._fixed_netmask: Any", "return str(self._fixed_gateway) @fixed_gateway.setter def fixed_gateway(self, value: str) -> None: self._fixed_gateway = IPv4Address(value) @property", "asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config else:", "\"Proxy Port: %s\\n\" \"Send data to cloud: %s\\n\" \"Last Contact: %s\" ) %", "str_bytes str_bytes = bytes(self._server, \"utf-8\") packet[44 : 44 + len(str_bytes)] = str_bytes str_bytes", "timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return await", "self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str] = None, timeout: int = 2,", "None self._orig_proxy_port = None self.set_handler(None) self.set_config() def handle_bootup_update(self, package: bytes) -> None: \"\"\"Handle", "8003 # Commands which acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find", "pos + 63], packages[pos + 63] ) pos += 64 async def handle_update(self,", "import aiohttp from multidict import CIMultiDictProxy from yarl import URL from .sensor import", "-> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) -> None: self._fixed_netmask =", "= 0x74 #: 'Magic' byte #2 to mark preserved original proxy settings class", "Any = None self._proxy: Any = None self._proxy_port: Any = None self._fixed_dns: Any", "orig_data_size > 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes", "Optional import asyncio import logging import socket import struct import time from ipaddress", "bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)] = str_bytes str_bytes = bytes(21 -", "def version(self) -> str: return self._version @property def last_seen(self) -> Optional[float]: return self._last_seen", "packet[15:19] = self._fixed_netmask.packed packet[19:23] = 
self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23", "self.proxy, self.proxy_port, \"Yes\" if self.send_data_to_cloud else \"No\", time.ctime(self.last_seen) if self.last_seen is not None", "DHCP: %s\\n\" \"DHCP IP: %s\\n\" \"Fixed IP: %s\\n\" \"Fixed Netmask: %s\\n\" \"Fixed Gateway:", "= sensor def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new sensor object for", "self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy", "= bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) : 44] = str_bytes str_bytes =", "self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def", "\"\"\"Broadcasts discover packet and yeld gateway objects created from resposes.\"\"\" result = []", "bytes) -> None: \"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package) == 15) and", ") self._last_seen = time.time() def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\"", "= config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2]", "sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages: bytes)", ": 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3:", "self, handler: Optional[SensorHandler], ) -> None: self._handler = handler def attach_to_proxy( self, proxy:", "IPv4Address(config[24:28]) self._name = config[28 : config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114)", "if (len(package) == 15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp %s\",", "64 async def handle_update(self, code: 
str, packages: bytes) -> None: \"\"\"Handle update packets.\"\"\"", "def attach_to_proxy( self, proxy: str, proxy_port: int, handler: SensorHandler, ) -> None: \"\"\"Attachs", "which acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find any available gateway", "self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler] = None self._version = \"1.50\" self._last_seen: Optional[float]", "\"255.255.255.255\" #: UDP port used by the gateway for comunnications PORT = 8003", "async with session.put( str(url), headers=headers, data=content ) as response: response_content = await response.content.read()", "= self._id packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19]", "config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except socket.timeout: break except asyncio.TimeoutError: break if", "\"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r, \" \"dhcp_ip=%r, \" \"use_dhcp=%r,", "comunnications PORT = 8003 # Commands which acceps gateway via UDP: DISCOVER_GATEWAYS =", "self._proxy_port self._attached = True self._use_proxy = True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port", "if config is not None: self.parse_config(config) def _check_init(self) -> None: if not self._initialized:", "def proxy(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy", "a new configuration. 
Gateway takes a few seconds to do the update REBOOT", "acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find any available gateway in", "gateway_id: str, local_ip_address: Optional[str] = None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id)", "self._last_seen = time.time() def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id]", "def prepare_socket( timeout: int, local_ip_address: Optional[str], ) -> socket.socket: \"\"\"Prepares UDP socket to", "= handler def attach_to_proxy( self, proxy: str, proxy_port: int, handler: SensorHandler, ) ->", "#: 'Magic' byte #2 to mark preserved original proxy settings class Gateway: \"\"\"Controls", "\" \"proxy=%r, \" \"proxy_port=%r, \" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) %", "None: \"\"\"Reboots the gateway and optional update configuration.\"\"\" config = await self.send_command(REBOOT, update_config,", "= value @property def fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value:", "= time.time() return config else: return None finally: sock.close() async def get_config(self, timeout:", "socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\",", ": config.find(0, 28, 49)].decode(\"utf-8\") str_end_pos = config.find(0, 49, 114) if ( config[str_end_pos +", "checksum += b checksum &= 0x7F if checksum == package_checksum: self._last_seen = time.time()", "multidict import CIMultiDictProxy from yarl import URL from .sensor import Sensor _LOGGER =", "self._orig_proxy_port self._attached = False self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port = None", "the gateway for comunnications PORT = 8003 # Commands which acceps gateway via", "= 2 #: Find a single available gateway in the local network GET_CONFIG", "= 
self._orig_proxy_port self._attached = False self._orig_use_proxy = None self._orig_proxy = None self._orig_proxy_port =", "orig_data[0] = self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0", "try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) if wait_for_result: loop = asyncio.get_event_loop() config = await asyncio.wait_for(loop.sock_recv(sock,", "comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST,", "dhcp_ip(self) -> str: return str(self._dhcp_ip) @property def use_dhcp(self) -> bool: return bool(self._use_dhcp) @use_dhcp.setter", "return str(self._proxy) @proxy.setter def proxy(self, value: str) -> None: if len(bytes(value, \"utf-8\")) >", "fixed_ip(self) -> str: return str(self._fixed_ip) @fixed_ip.setter def fixed_ip(self, value: str) -> None: self._fixed_ip", "with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)", "if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address is too long\") self._server =", ") -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str] = local_ip_address self._handler: Optional[SensorHandler]", "= False, timeout: int = 2 ) -> Optional[bytes]: \"\"\"Sends command and optional", "PUT request to cloud server.\"\"\" if self._send_data_to_cloud: try: async with aiohttp.ClientSession() as session:", "int) -> None: \"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug( \"Update package %s,", "-> None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Proxy server address is too", "return str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def 
__repr__(self) -> str:", "self.serial, self.gateway_id, \"Yes\" if self.use_dhcp else \"No\", self.dhcp_ip, self.fixed_ip, self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server,", "- len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1 str_bytes[2] = ORIG_PROXY_BYTE2 orig_part_size", "Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try:", "def proxy(self) -> str: return str(self._proxy) @proxy.setter def proxy(self, value: str) -> None:", "packet[8:10] = packet_size.to_bytes(2, \"big\") packet[10] = self._use_dhcp packet[11:15] = self._fixed_ip.packed packet[15:19] = self._fixed_netmask.packed", "else: orig_data_size = 0 orig_data = bytearray(orig_data_size) if orig_data_size > 0: orig_data[0] =", "self.fixed_netmask, self.fixed_gateway, self.fixed_dns, self.server, self.use_proxy, self.proxy, self.proxy_port, self.orig_use_proxy, self.orig_proxy, self.orig_proxy_port, ) def __str__(self)", "proxy port number\") self._proxy_port = value @property def fixed_dns(self) -> str: return str(self._fixed_dns)", "self.gateway_id, self.version, time.ctime(self.last_seen) if self.last_seen is not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip,", "command and optional data to the gateway.\"\"\" packet = self.prepare_command(command, self._id) sock =", "%s\\n\" \"Send data to cloud: %s\\n\" \"Last Contact: %s\" ) % ( self.name,", "local_ip_address: Optional[str] = None, ) -> None: self._id: bytes = bytes.fromhex(gateway_id) self._local_ip_address: Optional[str]", "preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy = self._use_proxy self._orig_proxy = self._proxy self._orig_proxy_port", "else: return None finally: sock.close() async def get_config(self, timeout: int = 2) ->", "len(orig_data) > 3: self._orig_use_proxy = orig_data[0] self._orig_proxy_port = 
int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0,", "-> str: return str(self._orig_proxy) @property def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def __repr__(self)", "\"utf-8\") packet[110 : 110 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes))", "#2 to mark preserved original proxy settings class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\"", "self._use_dhcp: Any = None self._fixed_ip: Any = None self._fixed_netmask: Any = None self._fixed_gateway:", "socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) sock.setblocking(False) sock.settimeout(timeout) if local_ip_address: sock.bind((local_ip_address, 0))", "str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] = ORIG_PROXY_BYTE1", "do the update REBOOT = 5 #: A reboot takes about 10s for", "for the gateway to be back up again ORIG_PROXY_BYTE1 = 0x19 #: 'Magic'", "if value < 0 or value >= 64 * 1024: raise ValueError(\"Invalid proxy", "self.check_config(config) and ( (self._id is None) or (self._id == config[2:8]) ) if result:", "proxy settings class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__( self, gateway_id: str,", "= int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data) > 3: self._orig_use_proxy = orig_data[0]", "self._proxy_port: Any = None self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors: Dict[str,", "0 self._fixed_ip = IPv4Address(config[16:20]) self._fixed_netmask = IPv4Address(config[20:24]) self._fixed_gateway = IPv4Address(config[24:28]) self._name = config[28", "+ 3 : 180]) self._proxy_port = int.from_bytes(config[180:182], \"big\") self._fixed_dns = IPv4Address(config[182:186]) if len(orig_data)", "try: async with aiohttp.ClientSession() as session: async with session.put( str(url), 
headers=headers, data=content )", "self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached = False self._orig_use_proxy", "of the gateway.\"\"\" return ( \"%s V%s, SerialNo: %s (id: %s)\\n\" \"Use DHCP:", "None: \"\"\"Handle gateway's bootup update packet.\"\"\" if (len(package) == 15) and (package[5:11] ==", "Sensor _LOGGER = logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with the", "address is too long\") self._proxy = value @property def proxy_port(self) -> int: return", "def fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) -> None:", "None self._fixed_dns: Any = None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict()", "await self.handle_sensor_update( packages[pos : pos + 63], packages[pos + 63] ) pos +=", "self._name = value @property def server(self) -> str: return str(self._server) @server.setter def server(self,", "return bool(self._use_dhcp) @use_dhcp.setter def use_dhcp(self, value: bool) -> None: self._use_dhcp = value @property", "for given ID.\"\"\" result = Sensor(self, sensor_id) self.add_sensor(result) return result def get_sensor(self, sensor_id:", "while pos + 64 <= packages_len: await self.handle_sensor_update( packages[pos : pos + 63],", "code: str, packages: bytes) -> None: \"\"\"Handle update packets.\"\"\" if code == \"00\":", "self.last_seen is not None else \"never\", self.attached, self.send_data_to_cloud, self.dhcp_ip, self.use_dhcp, self.fixed_ip, self.fixed_netmask, self.fixed_gateway,", "or value >= 64 * 1024: raise ValueError(\"Invalid proxy port number\") self._proxy_port =", "sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) except", "def send_command( self, command: int, wait_for_result: bool = False, timeout: int = 2", "via UDP: DISCOVER_GATEWAYS = 1 #: 
Find any available gateway in the local", "str_bytes packet[109] = self._use_proxy str_bytes = bytes(str(self._proxy), \"utf-8\") packet[110 : 110 + len(str_bytes)]", "0 orig_data = bytearray(orig_data_size) if orig_data_size > 0: orig_data[0] = self._orig_use_proxy orig_data[1:3] =", "\"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy", "get_config(self, timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\" return", "to comunicate with the gateway.\"\"\" sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) sock.setsockopt(socket.SOL_SOCKET,", "114) if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos + 2] ==", "@staticmethod def check_config(config: bytes) -> bool: return ( config is not None and", "discovered = [] loop = asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS,", "= config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await", "settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port self._attached", "bytearray() self._id = config[2:8] self._dhcp_ip = IPv4Address(config[11:15]) self._use_dhcp = config[15] != 0 self._fixed_ip", "self.orig_proxy, self.orig_proxy_port, ) def __str__(self) -> str: \"\"\"Return a readable representation of the", "package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler:", "False self._orig_use_proxy: Any = None self._orig_proxy: Any = None self._orig_proxy_port: Any = None", "str(self._server) @server.setter def server(self, value: str) -> None: if len(bytes(value, \"utf-8\")) > 64:", "= 
time.time() self._initialized = True return result async def update_config(self, timeout: int =", "any available gateway in the local network FIND_GATEWAY = 2 #: Find a", "\"00\": self.handle_bootup_update(packages) elif code == \"C0\": await self.handle_sensors_update(packages) else: _LOGGER.error( \"Unknnow update code", "= self._fixed_netmask.packed packet[19:23] = self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23 +", "use_dhcp(self, value: bool) -> None: self._use_dhcp = value @property def fixed_ip(self) -> str:", "None: if value < 0 or value >= 64 * 1024: raise ValueError(\"Invalid", "= self.check_config(config) and ( (self._id is None) or (self._id == config[2:8]) ) if", "64: raise ValueError(\"Proxy server address is too long\") self._proxy = value @property def", "if gateway_id in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(), local_ip_address) await gateway.init(config) result.append(gateway)", "bytes, package_checksum: int) -> None: \"\"\"Handle update packet for one sensor.\"\"\" _LOGGER.debug( \"Update", "to read measuremnts. 
Existing proxy settings will be preserved \"\"\" if self._orig_use_proxy is", "-> str: return str(self._server) @server.setter def server(self, value: str) -> None: if len(bytes(value,", "= False async def init( self, config: Optional[bytes] = None, ) -> None:", "gateway objects created from resposes.\"\"\" result = [] discovered = [] loop =", "None and (len(config) >= 186) and (len(config) == int.from_bytes(config[8:10], \"big\")) ) def parse_config(self,", "self, command: int, wait_for_result: bool = False, timeout: int = 2 ) ->", "fixed_dns(self) -> str: return str(self._fixed_dns) @fixed_dns.setter def fixed_dns(self, value: str) -> None: self._fixed_dns", "%s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\"", "restore original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port =", "raise Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket( timeout: int, local_ip_address: Optional[str], )", "is not None: self.parse_config(config) @staticmethod async def discover( local_ip_address: Optional[str] = None, timeout:", "= time.time() def add_sensor(self, sensor: Sensor) -> None: \"\"\"Add sensor object.\"\"\" self._sensors[sensor.sensor_id] =", "%s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy: %s\\n\" \"Proxy Server: %s\\n\" \"Proxy Port: %s\\n\"", "await response.content.read() _LOGGER.debug( \"Cloud response status: %s content: %s\", response.status, response_content.hex().upper(), ) except", "def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) -> None:", "self._id.hex().upper() @property def serial(self) -> str: return \"80\" + self._id[3:6].hex().upper() @property def version(self)", "DISCOVER_GATEWAYS = 1 #: Find any available gateway in the local network FIND_GATEWAY", "gateway.\"\"\" return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" 
\"attached=%r, \"", "preserved original proxy settings class Gateway: \"\"\"Controls MobileAlerts internet gataway.\"\"\" def __init__( self,", "self.send_command(REBOOT, update_config, timeout) if update_config and config is not None: self.parse_config(config) @staticmethod async", "= \"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async", "time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id) sensor.parse_packet(package) if self._handler: await self._handler(sensor) async", "str) -> None: self._fixed_gateway = IPv4Address(value) @property def name(self) -> str: return str(self._name)", "Dict[str, Sensor] = dict() self._initialized = False async def init( self, config: Optional[bytes]", "self._orig_use_proxy orig_data[1:3] = self._orig_proxy_port.to_bytes(2, \"big\") orig_data[3:orig_data_size] = orig_name_bytes orig_data_pos = 0 packet_size =", "-> None: \"\"\"Attachs the gateway to the proxy to read measuremnts. Existing proxy", "if local_ip_address: sock.bind((local_ip_address, 0)) else: sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command: int,", "@proxy_port.setter def proxy_port(self, value: int) -> None: if value < 0 or value", "too long\") self._proxy = value @property def proxy_port(self) -> int: return int(self._proxy_port) @proxy_port.setter", "proxy: str, proxy_port: int, handler: SensorHandler, ) -> None: \"\"\"Attachs the gateway to", "return ( config is not None and (len(config) >= 186) and (len(config) ==", "as e: _LOGGER.error(\"Error resending request to cloud: %r\", e) @property def gateway_id(self) ->", "# Commands which acceps gateway via UDP: DISCOVER_GATEWAYS = 1 #: Find any", "len(str_bytes) - 3) str_bytes[3 : 3 + orig_part_size] = orig_data[ orig_data_pos : orig_data_pos", "the proxy to read measuremnts. 
Existing proxy settings will be preserved \"\"\" if", "str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) -> None: self._fixed_netmask = IPv4Address(value) @property def", "= str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos < orig_data_size: str_bytes[1] =", "update_config: bool, timeout: int = 30) -> None: \"\"\"Reboots the gateway and optional", "= self._fixed_gateway.packed str_bytes = bytes(self._name, \"utf-8\") packet[23 : 23 + len(str_bytes)] = str_bytes", "\"192.168.1.222\" self.fixed_netmask = \"255.255.255.0\" self.fixed_gateway = \"192.168.1.254\" self.fixed_dns = \"192.168.1.253\" self.server = \"www.data199.com\"", "= min(orig_data_size - orig_data_pos, len(str_bytes) - 3) str_bytes[3 : 3 + orig_part_size] =", "sensor object.\"\"\" self._sensors[sensor.sensor_id] = sensor def create_sensor(self, sensor_id: str) -> Sensor: \"\"\"Create new", "= len(packages) while pos + 64 <= packages_len: await self.handle_sensor_update( packages[pos : pos", "if not self._initialized: raise Exception(\"Gateway is not initialized\") @staticmethod def prepare_socket( timeout: int,", "handle_sensor_update(self, package: bytes, package_checksum: int) -> None: \"\"\"Handle update packet for one sensor.\"\"\"", "self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\") self._last_seen = time.time() self._initialized = True return result async def", "bytes(21 - len(str_bytes)) packet[44 - len(str_bytes) : 44] = str_bytes str_bytes = bytes(self._server,", "self.parse_config(config) else: return False def set_config(self) -> None: \"\"\"Set configuration to the gateway.\"\"\"", "logging import socket import struct import time from ipaddress import IPv4Address import aiohttp", "local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) while True: try: config", ") -> List[\"Gateway\"]: \"\"\"Broadcasts discover packet and yeld gateway objects created 
from resposes.\"\"\"", "None self._fixed_netmask: Any = None self._fixed_gateway: Any = None self._name: Any = None", "self._version = ( str(int.from_bytes(package[11:13], \"big\")) + \".\" + str(int.from_bytes(package[13:15], \"big\")) ) self._last_seen =", "= Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) -> None:", "sock = Gateway.prepare_socket(1, self._local_ip_address) try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) ->", "== config[2:8]) ) if result: orig_data = bytearray() self._id = config[2:8] self._dhcp_ip =", "return ( \"%s.%s(%s(%s), \" \"gateway_id=%s, \" \"version=%r, \" \"last_seen=%r, \" \"attached=%r, \" \"send_data_to_cloud=%r,", "try: sock.sendto(packet, (BROADCAST_ADDR, PORT)) finally: sock.close() def reset_config(self) -> None: \"\"\"Reset configuration of", "local network FIND_GATEWAY = 2 #: Find a single available gateway in the", "None self._send_data_to_cloud = True self._sensors: Dict[str, Sensor] = dict() self._initialized = False async", "return int(self._proxy_port) @proxy_port.setter def proxy_port(self, value: int) -> None: if value < 0", "= True self._sensors: Dict[str, Sensor] = dict() self._initialized = False async def init(", "str) -> None: self._fixed_dns = IPv4Address(value) @property def orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy)", ": 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy = config[114] != 0 str_end_pos = config.find(0,", "and yeld gateway objects created from resposes.\"\"\" result = [] discovered = []", "= None self._proxy: Any = None self._proxy_port: Any = None self._fixed_dns: Any =", "= orig_data[0] self._orig_proxy_port = int.from_bytes(orig_data[1:3], \"big\") str_end_pos = orig_data.find(0, 3) self._orig_proxy = orig_data[3:str_end_pos].decode(\"utf-8\")", "110 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - 
len(str_bytes)) if orig_data_pos <", "-> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None: self._send_data_to_cloud =", "44 + len(str_bytes)] = str_bytes str_bytes = bytearray(65 - len(str_bytes)) if orig_data_pos <", "def get_config(self, timeout: int = 2) -> Optional[bytes]: \"\"\"Obtains configuration from the gateway.\"\"\"", "Gateway takes a few seconds to do the update REBOOT = 5 #:", "gateway.\"\"\" return await self.send_command(FIND_GATEWAY, True, timeout) @staticmethod def check_config(config: bytes) -> bool: return", "Set a new configuration. Gateway takes a few seconds to do the update", "str_end_pos = config.find(0, 115, 180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1]", "Existing proxy settings will be preserved \"\"\" if self._orig_use_proxy is None: self._orig_use_proxy =", "self._handler: await self._handler(sensor) async def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update packet", "def orig_proxy_port(self) -> int: return int(self._orig_proxy_port) def __repr__(self) -> str: \"\"\"Return a formal", "def get_sensor(self, sensor_id: str) -> Sensor: \"\"\"Return sensor object for given ID, creates", "asyncio.get_event_loop() sock = Gateway.prepare_socket(timeout, local_ip_address) packet = Gateway.prepare_command(DISCOVER_GATEWAYS, bytearray(6)) try: sock.sendto(packet, (BROADCAST_ADDR, PORT))", "Netmask: %s\\n\" \"Fixed Gateway: %s\\n\" \"Fixed DNS: %s\\n\" \"Cloud Server: %s\\n\" \"Use Proxy:", "pos = 0 packages_len = len(packages) while pos + 64 <= packages_len: await", "= self._proxy self._orig_proxy_port = self._proxy_port self._attached = True self._use_proxy = True self._proxy =", "= logging.getLogger(__name__) SensorHandler = Callable[[Sensor], Awaitable[None]] #: all communication with the gateways are", "0)) else: sock.bind((\"\", 0)) return sock @staticmethod def prepare_command(command: int, gateway_id: 
bytes) ->", "None: self._handler = handler def attach_to_proxy( self, proxy: str, proxy_port: int, handler: SensorHandler,", "True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler) self.set_config() # await self.get_config() def", "180) self._proxy = config[115:str_end_pos].decode(\"utf-8\") if ( config[str_end_pos + 1] == ORIG_PROXY_BYTE1 and config[str_end_pos", "self._attached = True self._use_proxy = True self._proxy = IPv4Address(proxy) self._proxy_port = proxy_port self.set_handler(handler)", "gateways are broadcasts BROADCAST_ADDR = \"255.255.255.255\" #: UDP port used by the gateway", "2] == ORIG_PROXY_BYTE2 ): orig_data.extend(config[str_end_pos + 3 : 114]) self._server = config[49:str_end_pos].decode(\"utf-8\") self._use_proxy", "self._fixed_netmask: Any = None self._fixed_gateway: Any = None self._name: Any = None self._server:", "import Any, Awaitable, Callable, Dict, List, Optional import asyncio import logging import socket", "original settings.\"\"\" if self._attached: self._use_proxy = self._orig_use_proxy self._proxy = self._orig_proxy self._proxy_port = self._orig_proxy_port", "self._handler(sensor) async def handle_sensors_update(self, packages: bytes) -> None: \"\"\"Handle update packet for few", "Gateway.check_config(config): gateway_id = config[2:8] if gateway_id in discovered: continue discovered.append(gateway_id) gateway = Gateway(gateway_id.hex().upper(),", "gateway.\"\"\" config = await self.get_config(timeout) if config is not None: return self.parse_config(config) else:", "packet.\"\"\" if (len(package) == 15) and (package[5:11] == self._id): _LOGGER.debug( \"Gateway bootup timestamp", "None: if len(bytes(value, \"utf-8\")) > 64: raise ValueError(\"Server address is too long\") self._server", "\"www.data199.com\" self.use_proxy = False self.proxy = \"192.168.1.1\" self.proxy_port = 8080 self.set_config() async def", "#: Find a single available gateway in the local network GET_CONFIG = 3", "-> bool: 
\"\"\"Updates configuration from the gateway.\"\"\" config = await self.get_config(timeout) if config", "def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) -> None:", "config = await asyncio.wait_for(loop.sock_recv(sock, 256), timeout) self._last_seen = time.time() return config else: return", "gataway.\"\"\" def __init__( self, gateway_id: str, local_ip_address: Optional[str] = None, ) -> None:", "fixed_netmask(self) -> str: return str(self._fixed_netmask) @fixed_netmask.setter def fixed_netmask(self, value: str) -> None: self._fixed_netmask", "def detach_from_proxy(self) -> None: \"\"\"Detachs the gateway from the proxy and restore original", "if checksum == package_checksum: self._last_seen = time.time() sensor_id = package[6:12].hex().upper() sensor = self.get_sensor(sensor_id)", "orig_use_proxy(self) -> bool: return bool(self._orig_use_proxy) @property def orig_proxy(self) -> str: return str(self._orig_proxy) @property", "\" \"orig_use_proxy=%r, \" \"orig_proxy=%r, \" \"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, self.__class__.__qualname__, self.name,", "prepare_command(command: int, gateway_id: bytes) -> bytes: \"\"\"Prepares command UDP packet to send.\"\"\" packet", "None) or (self._id == config[2:8]) ) if result: orig_data = bytearray() self._id =", "value: bool) -> None: self._use_proxy = value @property def proxy(self) -> str: return", "Find a single available gateway in the local network GET_CONFIG = 3 #:", "@property def send_data_to_cloud(self) -> bool: return self._send_data_to_cloud @send_data_to_cloud.setter def send_data_to_cloud(self, value: bool) ->", "Any = None self._orig_proxy: Any = None self._orig_proxy_port: Any = None self._dhcp_ip: Any", "SET_CONFIG = 4 #: Set a new configuration. 
Gateway takes a few seconds", "\"orig_proxy_port=%r\" \")\" ) % ( self.__class__.__module__, self.__class__.__qualname__, self.name, self.serial, self.gateway_id, self.version, time.ctime(self.last_seen) if", "configuration from the gateway.\"\"\" config = await self.get_config(timeout) if config is not None:", "return str(self._server) @server.setter def server(self, value: str) -> None: if len(bytes(value, \"utf-8\")) >", "self._last_seen = time.time() return config else: return None finally: sock.close() async def get_config(self," ]