code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
import collections
import json
import os
from tqdm import tqdm
from loader.Database import DBViewIndex, DBView, DBManager, check_target_path
from exporter.Shared import AbilityData, SkillData, PlayerAction, snakey
from exporter.Mappings import CLASS_TYPES
class UnionAbility(DBView):
def __init__(self, index):
super().__init__(index, "UnionAbility", labeled_fields=["_Name"])
def process_result(self, res):
for i in (1, 2, 3, 4, 5):
self.link(res, f"_AbilityId{i}", "AbilityData")
return res
def export_all_to_folder(self, out_dir="./out", ext=".json"):
processed_res = [self.process_result(res) for res in self.get_all()]
with open(os.path.join(out_dir, f"_union{ext}"), "w", newline="", encoding="utf-8") as fp:
json.dump(processed_res, fp, indent=2, ensure_ascii=False, default=str)
class AbilityCrestBuildupGroup(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestBuildupGroup")
class AbilityCrestBuildupLevel(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestBuildupLevel")
class AbilityCrestRarity(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestRarity")
class AbilityCrestTrade(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestTrade")
class AbilityCrest(DBView):
def __init__(self, index):
super().__init__(
index,
"AbilityCrest",
labeled_fields=["_Name", "_Text1", "_Text2", "_Text3", "_Text4", "_Text5"],
)
def process_result(self, res, full_abilities=False):
inner = (1, 2, 3) if full_abilities else (3,)
outer = (1, 2, 3)
for i in outer:
for j in inner:
k = f"_Abilities{i}{j}"
if k in res and res[k]:
res[k] = self.index["AbilityData"].get(res[k], full_query=True)
if uab := res.get("_UnionAbilityGroupId"):
res["_UnionAbilityGroupId"] = self.index["UnionAbility"].get(uab)
if (trade_data := self.index["AbilityCrestTrade"].get(res["_Id"], by="_AbilityCrestId")) :
res["_TradeData"] = trade_data
return res
def get(self, pk, by="_Name", fields=None, full_query=True, full_abilities=False):
res = super().get(pk, by=by, fields=fields)
if not full_query:
return res
return self.process_result(res, full_abilities)
@staticmethod
def outfile_name(res, ext=".json"):
name = "UNKNOWN" if "_Name" not in res else res["_Name"]
# FIXME: do better name sanitation here
return snakey(f'{res["_BaseId"]}_{res["_VariationId"]:02}_{name}{ext}')
def export_all_to_folder(self, out_dir="./out", ext=".json"):
out_dir = os.path.join(out_dir, "wyrmprints")
all_res = self.get_all()
check_target_path(out_dir)
duplicates = collections.defaultdict(list)
for res in all_res:
duplicates[self.outfile_name(res, ext)].append(res)
for out_name, res_list in tqdm(duplicates.items(), desc=os.path.basename(out_dir)):
res_list = [self.process_result(res) for res in res_list]
main_res = res_list[0]
main_res_id = main_res["_Id"]
if len(res_list) > 1:
keys_that_differ = set()
id_to_sub_res = {}
for sub_res in res_list[1:]:
id_to_sub_res[sub_res["_Id"]] = sub_res
for key in sub_res:
if sub_res[key] != main_res[key]:
keys_that_differ.add(key)
for key in keys_that_differ:
main_res[key] = {main_res_id: main_res[key]}
for sub_res_id, sub_res in id_to_sub_res.items():
main_res[key][sub_res_id] = sub_res[key]
output = os.path.join(out_dir, out_name)
with open(output, "w", newline="", encoding="utf-8") as fp:
json.dump(main_res, fp, indent=2, ensure_ascii=False, default=str)
if __name__ == "__main__":
index = DBViewIndex()
view = AbilityCrest(index)
view.export_all_to_folder() | exporter/Wyrmprints.py | import collections
import json
import os
from tqdm import tqdm
from loader.Database import DBViewIndex, DBView, DBManager, check_target_path
from exporter.Shared import AbilityData, SkillData, PlayerAction, snakey
from exporter.Mappings import CLASS_TYPES
class UnionAbility(DBView):
def __init__(self, index):
super().__init__(index, "UnionAbility", labeled_fields=["_Name"])
def process_result(self, res):
for i in (1, 2, 3, 4, 5):
self.link(res, f"_AbilityId{i}", "AbilityData")
return res
def export_all_to_folder(self, out_dir="./out", ext=".json"):
processed_res = [self.process_result(res) for res in self.get_all()]
with open(os.path.join(out_dir, f"_union{ext}"), "w", newline="", encoding="utf-8") as fp:
json.dump(processed_res, fp, indent=2, ensure_ascii=False, default=str)
class AbilityCrestBuildupGroup(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestBuildupGroup")
class AbilityCrestBuildupLevel(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestBuildupLevel")
class AbilityCrestRarity(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestRarity")
class AbilityCrestTrade(DBView):
def __init__(self, index):
super().__init__(index, "AbilityCrestTrade")
class AbilityCrest(DBView):
def __init__(self, index):
super().__init__(
index,
"AbilityCrest",
labeled_fields=["_Name", "_Text1", "_Text2", "_Text3", "_Text4", "_Text5"],
)
def process_result(self, res, full_abilities=False):
inner = (1, 2, 3) if full_abilities else (3,)
outer = (1, 2, 3)
for i in outer:
for j in inner:
k = f"_Abilities{i}{j}"
if k in res and res[k]:
res[k] = self.index["AbilityData"].get(res[k], full_query=True)
if uab := res.get("_UnionAbilityGroupId"):
res["_UnionAbilityGroupId"] = self.index["UnionAbility"].get(uab)
if (trade_data := self.index["AbilityCrestTrade"].get(res["_Id"], by="_AbilityCrestId")) :
res["_TradeData"] = trade_data
return res
def get(self, pk, by="_Name", fields=None, full_query=True, full_abilities=False):
res = super().get(pk, by=by, fields=fields)
if not full_query:
return res
return self.process_result(res, full_abilities)
@staticmethod
def outfile_name(res, ext=".json"):
name = "UNKNOWN" if "_Name" not in res else res["_Name"]
# FIXME: do better name sanitation here
return snakey(f'{res["_BaseId"]}_{res["_VariationId"]:02}_{name}{ext}')
def export_all_to_folder(self, out_dir="./out", ext=".json"):
out_dir = os.path.join(out_dir, "wyrmprints")
all_res = self.get_all()
check_target_path(out_dir)
duplicates = collections.defaultdict(list)
for res in all_res:
duplicates[self.outfile_name(res, ext)].append(res)
for out_name, res_list in tqdm(duplicates.items(), desc=os.path.basename(out_dir)):
res_list = [self.process_result(res) for res in res_list]
main_res = res_list[0]
main_res_id = main_res["_Id"]
if len(res_list) > 1:
keys_that_differ = set()
id_to_sub_res = {}
for sub_res in res_list[1:]:
id_to_sub_res[sub_res["_Id"]] = sub_res
for key in sub_res:
if sub_res[key] != main_res[key]:
keys_that_differ.add(key)
for key in keys_that_differ:
main_res[key] = {main_res_id: main_res[key]}
for sub_res_id, sub_res in id_to_sub_res.items():
main_res[key][sub_res_id] = sub_res[key]
output = os.path.join(out_dir, out_name)
with open(output, "w", newline="", encoding="utf-8") as fp:
json.dump(main_res, fp, indent=2, ensure_ascii=False, default=str)
if __name__ == "__main__":
index = DBViewIndex()
view = AbilityCrest(index)
view.export_all_to_folder() | 0.29696 | 0.090013 |
import csv
import logging
import os
import time
import traceback
from argparse import ArgumentParser
import coloredlogs
from datetime import datetime
import warnings
import matplotlib
matplotlib.use("Agg")
import numpy as np
from fastai.vision import *
from sklearn.metrics import recall_score
def configure_logging():
coloredlogs.install(level="INFO")
coloredlogs.DEFAULT_LEVEL_STYLES = {
"debug": {"color": "white", "bold": False},
"info": {"color": "white", "bold": True},
"warning": {"color": "yellow", "bold": True},
"error": {"color": "red", "bold": True},
"fatal": {"color": "magenta", "bold": True},
}
logger = logging.getLogger("isic")
log_format = "%(asctime)s %(levelname)s %(message)s"
formatter = coloredlogs.ColoredFormatter(log_format)
for handler in logger.handlers:
handler.setFormatter(formatter)
logger.propagate = False
def parse_arguments():
parser = ArgumentParser()
parser.add_argument("--eid", type=str, required=True)
parser.add_argument("--epochs", type=int, default=50)
parser.add_argument("--data-dir", type=str, required=True)
parser.add_argument("--cycle1", action="store_true")
parser.add_argument("--lr", type=float, default=1e-4)
parser.add_argument("--tune-lr", type=float, default=1e-9)
return parser.parse_args()
def prepare_learner(args):
transforms = get_transforms(flip_vert=True, # enable flips in both directions but disable everything else
max_rotate=None, max_lighting=None, max_zoom=0, max_warp=None, p_affine=0, p_lighting=0)
logging.warning("Loading data from {}".format(args.data_dir))
data = ImageDataBunch.from_folder(os.path.join("data", args.data_dir),
seed=42, ds_tfms=transforms, size=256, bs=2).normalize(imagenet_stats)
logging.warning("Setting up model")
with warnings.catch_warnings():
warnings.simplefilter("ignore")
learner = load_learner(".")
learner.data = data
if len(learner.metrics) == 2:
logging.fatal("Modifying original model")
del learner.metrics[1]
learner.model[-1][-1] = nn.Linear(in_features=512, out_features=learner.data.c, bias=True).cuda()
logging.debug(learner.summary())
return learner
def prepare_results(recorder, epochs_offset=0):
assert len(recorder.metrics_names) == 1, "more metrics aren't implemented"
columns = ("epoch", recorder.metrics_names[0], "train_loss", "valid_loss")
epochs = range(epochs_offset, epochs_offset+len(recorder.nb_batches))
logging.info("Preparing training results of {} epochs with offset {}".format(len(epochs), epochs_offset))
metrics = [m[0].item() for m in recorder.metrics]
# aggregate mean loss per epoch
train_loss = []
offset = 0
for batch_size in recorder.nb_batches:
batch_losses = recorder.losses[offset:offset+batch_size]
offset += batch_size
train_loss.append(np.mean(batch_losses))
valid_loss = recorder.val_losses
return [columns] + list(zip(epochs, metrics, train_loss, valid_loss))
def export_results(results, args):
if not results:
logging.warning("No results to export")
return
metrics_file = os.path.join(args.eval_dir, "metrics.csv")
logging.info("Exporting results to '{}'".format(metrics_file))
with open(metrics_file, "w") as fh:
writer = csv.writer(fh)
for row in results:
writer.writerow(row)
def export_learner(learner, args):
learner_file = os.path.join(args.eval_dir, "model.pkl")
logging.info("Exporting learner to '{}'".format(learner_file))
learner.export(learner_file)
def test_learner(learner, file_name, args):
predictions, labels, losses = learner.get_preds(with_loss=True)
interpretation = ClassificationInterpretation(learner, predictions, labels, losses)
_ = interpretation.plot_confusion_matrix(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, file_name))
_ = interpretation.plot_confusion_matrix(return_fig=True, normalize=True)
plt.savefig(os.path.join(args.eval_dir, "normalized-" + file_name))
scores = recall_score(labels, np.argmax(predictions, axis=1), average=None)
logging.error("Mean class recall: {:.3f}".format(np.mean(scores)))
logging.info("Per-class recall: {}".format(", ".join(["{}: {:.3f}".format(c, a) for c, a in zip(learner.data.valid_ds.y.classes, scores)])))
def main():
results = None
try:
args = parse_arguments()
args.eval_dir = os.path.join("output", args.eid)
logging.info("Args: {}".format(args))
learner = prepare_learner(args)
data_set_size = len(learner.data.train_ds)
total_training = args.epochs * 23000
new_epochs = (total_training // data_set_size + 9) // 10 * 10
logging.fatal("Changing epochs from {} to {} to account for data set size {}".format(args.epochs, new_epochs, data_set_size))
logging.fatal("Samples per class: {}".format([len(np.where(learner.data.train_ds.y.items == c)[0]) for c in range(learner.data.train_ds.y.c)]))
args.epochs = new_epochs
if args.lr:
logging.warning("Training with lr={}".format(args.lr))
if args.cycle1:
learner.fit_one_cycle(args.epochs, max_lr=args.lr)
else:
learner.fit(args.epochs, lr=args.lr)
_ = learner.recorder.plot_losses(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "initial-loss.png"))
_ = learner.recorder.plot_metrics(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "initial-metrics.png"))
results = prepare_results(learner.recorder)
test_learner(learner, "initial-confusion-matrix.png", args)
else:
logging.warning("Not performing initial training!")
if not args.tune_lr:
logging.warning("Not fine-tuning model!")
return
# fine-tune
if not False:
logging.info("Unfreezing entire learner")
learner.unfreeze()
logging.debug(learner.summary())
else:
logging.fatal("NOT UNFREEZING")
logging.warning("Fine-tuning with lr={}".format(args.tune_lr))
if args.cycle1:
learner.fit_one_cycle(args.epochs, max_lr=args.tune_lr)
else:
learner.fit(args.epochs, lr=args.tune_lr)
_ = learner.recorder.plot_losses(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "tuned-loss.png"))
_ = learner.recorder.plot_metrics(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "tuned-metrics.png"))
results = prepare_results(learner.recorder) if results is None else \
results + prepare_results(learner.recorder, len(results)-1)[1:]
test_learner(learner, "tuned-confusion-matrix.png", args)
finally:
if results:
export_results(results, args)
export_learner(learner, args)
if __name__ == "__main__":
START_TIME = time.time()
np.random.seed(42)
configure_logging()
try:
main()
except Exception as ex:
logging.fatal("Exception occurred: {}".format(traceback.format_exc()))
finally:
logging.info("Finished eval after {:.1f}m".format((time.time() - START_TIME) / 60)) | src/isic.py |
import csv
import logging
import os
import time
import traceback
from argparse import ArgumentParser
import coloredlogs
from datetime import datetime
import warnings
import matplotlib
matplotlib.use("Agg")
import numpy as np
from fastai.vision import *
from sklearn.metrics import recall_score
def configure_logging():
coloredlogs.install(level="INFO")
coloredlogs.DEFAULT_LEVEL_STYLES = {
"debug": {"color": "white", "bold": False},
"info": {"color": "white", "bold": True},
"warning": {"color": "yellow", "bold": True},
"error": {"color": "red", "bold": True},
"fatal": {"color": "magenta", "bold": True},
}
logger = logging.getLogger("isic")
log_format = "%(asctime)s %(levelname)s %(message)s"
formatter = coloredlogs.ColoredFormatter(log_format)
for handler in logger.handlers:
handler.setFormatter(formatter)
logger.propagate = False
def parse_arguments():
parser = ArgumentParser()
parser.add_argument("--eid", type=str, required=True)
parser.add_argument("--epochs", type=int, default=50)
parser.add_argument("--data-dir", type=str, required=True)
parser.add_argument("--cycle1", action="store_true")
parser.add_argument("--lr", type=float, default=1e-4)
parser.add_argument("--tune-lr", type=float, default=1e-9)
return parser.parse_args()
def prepare_learner(args):
transforms = get_transforms(flip_vert=True, # enable flips in both directions but disable everything else
max_rotate=None, max_lighting=None, max_zoom=0, max_warp=None, p_affine=0, p_lighting=0)
logging.warning("Loading data from {}".format(args.data_dir))
data = ImageDataBunch.from_folder(os.path.join("data", args.data_dir),
seed=42, ds_tfms=transforms, size=256, bs=2).normalize(imagenet_stats)
logging.warning("Setting up model")
with warnings.catch_warnings():
warnings.simplefilter("ignore")
learner = load_learner(".")
learner.data = data
if len(learner.metrics) == 2:
logging.fatal("Modifying original model")
del learner.metrics[1]
learner.model[-1][-1] = nn.Linear(in_features=512, out_features=learner.data.c, bias=True).cuda()
logging.debug(learner.summary())
return learner
def prepare_results(recorder, epochs_offset=0):
assert len(recorder.metrics_names) == 1, "more metrics aren't implemented"
columns = ("epoch", recorder.metrics_names[0], "train_loss", "valid_loss")
epochs = range(epochs_offset, epochs_offset+len(recorder.nb_batches))
logging.info("Preparing training results of {} epochs with offset {}".format(len(epochs), epochs_offset))
metrics = [m[0].item() for m in recorder.metrics]
# aggregate mean loss per epoch
train_loss = []
offset = 0
for batch_size in recorder.nb_batches:
batch_losses = recorder.losses[offset:offset+batch_size]
offset += batch_size
train_loss.append(np.mean(batch_losses))
valid_loss = recorder.val_losses
return [columns] + list(zip(epochs, metrics, train_loss, valid_loss))
def export_results(results, args):
if not results:
logging.warning("No results to export")
return
metrics_file = os.path.join(args.eval_dir, "metrics.csv")
logging.info("Exporting results to '{}'".format(metrics_file))
with open(metrics_file, "w") as fh:
writer = csv.writer(fh)
for row in results:
writer.writerow(row)
def export_learner(learner, args):
learner_file = os.path.join(args.eval_dir, "model.pkl")
logging.info("Exporting learner to '{}'".format(learner_file))
learner.export(learner_file)
def test_learner(learner, file_name, args):
predictions, labels, losses = learner.get_preds(with_loss=True)
interpretation = ClassificationInterpretation(learner, predictions, labels, losses)
_ = interpretation.plot_confusion_matrix(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, file_name))
_ = interpretation.plot_confusion_matrix(return_fig=True, normalize=True)
plt.savefig(os.path.join(args.eval_dir, "normalized-" + file_name))
scores = recall_score(labels, np.argmax(predictions, axis=1), average=None)
logging.error("Mean class recall: {:.3f}".format(np.mean(scores)))
logging.info("Per-class recall: {}".format(", ".join(["{}: {:.3f}".format(c, a) for c, a in zip(learner.data.valid_ds.y.classes, scores)])))
def main():
results = None
try:
args = parse_arguments()
args.eval_dir = os.path.join("output", args.eid)
logging.info("Args: {}".format(args))
learner = prepare_learner(args)
data_set_size = len(learner.data.train_ds)
total_training = args.epochs * 23000
new_epochs = (total_training // data_set_size + 9) // 10 * 10
logging.fatal("Changing epochs from {} to {} to account for data set size {}".format(args.epochs, new_epochs, data_set_size))
logging.fatal("Samples per class: {}".format([len(np.where(learner.data.train_ds.y.items == c)[0]) for c in range(learner.data.train_ds.y.c)]))
args.epochs = new_epochs
if args.lr:
logging.warning("Training with lr={}".format(args.lr))
if args.cycle1:
learner.fit_one_cycle(args.epochs, max_lr=args.lr)
else:
learner.fit(args.epochs, lr=args.lr)
_ = learner.recorder.plot_losses(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "initial-loss.png"))
_ = learner.recorder.plot_metrics(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "initial-metrics.png"))
results = prepare_results(learner.recorder)
test_learner(learner, "initial-confusion-matrix.png", args)
else:
logging.warning("Not performing initial training!")
if not args.tune_lr:
logging.warning("Not fine-tuning model!")
return
# fine-tune
if not False:
logging.info("Unfreezing entire learner")
learner.unfreeze()
logging.debug(learner.summary())
else:
logging.fatal("NOT UNFREEZING")
logging.warning("Fine-tuning with lr={}".format(args.tune_lr))
if args.cycle1:
learner.fit_one_cycle(args.epochs, max_lr=args.tune_lr)
else:
learner.fit(args.epochs, lr=args.tune_lr)
_ = learner.recorder.plot_losses(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "tuned-loss.png"))
_ = learner.recorder.plot_metrics(return_fig=True)
plt.savefig(os.path.join(args.eval_dir, "tuned-metrics.png"))
results = prepare_results(learner.recorder) if results is None else \
results + prepare_results(learner.recorder, len(results)-1)[1:]
test_learner(learner, "tuned-confusion-matrix.png", args)
finally:
if results:
export_results(results, args)
export_learner(learner, args)
if __name__ == "__main__":
START_TIME = time.time()
np.random.seed(42)
configure_logging()
try:
main()
except Exception as ex:
logging.fatal("Exception occurred: {}".format(traceback.format_exc()))
finally:
logging.info("Finished eval after {:.1f}m".format((time.time() - START_TIME) / 60)) | 0.566139 | 0.167202 |
import numpy as np
import os
import cv2
from Constants import baseDir,erzhimap_Dir,raw_val_img_dir,visual_Dir
import scipy.misc as misc
def dot_Visualization(img_data, lbl_data,box_data, save_path, idx): # 可视化像素点
w, h = img_data.shape[0], img_data.shape[1]
image = np.zeros((w, h), np.uint8)
for xy in lbl_data:
cv2.circle(img_data, (xy[0],xy[1]), 2, (255,255,255), -1)
# for b in box_data:
# cv2.rectangle(img_data, (b[0],b[1]), (b[2],b[3]), (0,255,0), 1)
cv2.imwrite(save_path + idx, img_data)
if __name__ =="__main__":
# imgDir = "/home/jjliao/Visdrone_yolo_cluster/VisDrone2019-DET-train/images/"
imgDir = "/home/jjliao/Visdrone_coco/images/val/"
txtDir = "/data/data/cluster-detector/erzhimap-yolov4/"
boxDir = "/data/data/cluster-detector/erzhimap-box/"
images = [i for i in os.listdir(imgDir) if '.jpg' in i]
labels = [i for i in os.listdir(txtDir) if '.txt' in i]
print('find image', len(images))
print('find label', len(labels))
width, height = 600, 600
for idx,lbl in enumerate(labels):
if idx>50:
break
img_id = lbl[:-4]
img = lbl.replace('txt', 'jpg')
# img_data = misc.imread(os.path.join(imgDir, img))
imgpath = os.path.join(imgDir, img)
img_data = cv2.imread(imgpath, -1)
print("img path:",imgpath)
height, width = img_data.shape[:2]
# 缩小图像
size = (int(width*0.25), int(height*0.25))
# img_data = cv2.resize(img_data, size, interpolation=cv2.INTER_AREA)
lbl_path = os.path.join(txtDir, lbl)
box_path = os.path.join(boxDir, lbl)
lbl_data = np.loadtxt(lbl_path,dtype=np.int32,delimiter=",")
# box_data = np.loadtxt(box_path,dtype=np.int32,delimiter=",")
if len(lbl_data)==0:
print("ERROR: empty data:",lbl)
continue
dot_Visualization(img_data, lbl_data,[],visual_Dir, img) | custom/Visual.py | import numpy as np
import os
import cv2
from Constants import baseDir,erzhimap_Dir,raw_val_img_dir,visual_Dir
import scipy.misc as misc
def dot_Visualization(img_data, lbl_data,box_data, save_path, idx): # 可视化像素点
w, h = img_data.shape[0], img_data.shape[1]
image = np.zeros((w, h), np.uint8)
for xy in lbl_data:
cv2.circle(img_data, (xy[0],xy[1]), 2, (255,255,255), -1)
# for b in box_data:
# cv2.rectangle(img_data, (b[0],b[1]), (b[2],b[3]), (0,255,0), 1)
cv2.imwrite(save_path + idx, img_data)
if __name__ =="__main__":
# imgDir = "/home/jjliao/Visdrone_yolo_cluster/VisDrone2019-DET-train/images/"
imgDir = "/home/jjliao/Visdrone_coco/images/val/"
txtDir = "/data/data/cluster-detector/erzhimap-yolov4/"
boxDir = "/data/data/cluster-detector/erzhimap-box/"
images = [i for i in os.listdir(imgDir) if '.jpg' in i]
labels = [i for i in os.listdir(txtDir) if '.txt' in i]
print('find image', len(images))
print('find label', len(labels))
width, height = 600, 600
for idx,lbl in enumerate(labels):
if idx>50:
break
img_id = lbl[:-4]
img = lbl.replace('txt', 'jpg')
# img_data = misc.imread(os.path.join(imgDir, img))
imgpath = os.path.join(imgDir, img)
img_data = cv2.imread(imgpath, -1)
print("img path:",imgpath)
height, width = img_data.shape[:2]
# 缩小图像
size = (int(width*0.25), int(height*0.25))
# img_data = cv2.resize(img_data, size, interpolation=cv2.INTER_AREA)
lbl_path = os.path.join(txtDir, lbl)
box_path = os.path.join(boxDir, lbl)
lbl_data = np.loadtxt(lbl_path,dtype=np.int32,delimiter=",")
# box_data = np.loadtxt(box_path,dtype=np.int32,delimiter=",")
if len(lbl_data)==0:
print("ERROR: empty data:",lbl)
continue
dot_Visualization(img_data, lbl_data,[],visual_Dir, img) | 0.066471 | 0.153042 |
import sys
import logging
import cProfile
import functools
import threading
import pprint
logdata = threading.local()
INDENT_WIDTH = 2
MAX_INDENT = 20
COLORS = {
"RED": '\033[91m',
"GREEN": '\033[92m',
"YELLOW": '\033[93m',
"BLUE": '\033[94m',
"MAGENTA": '\033[95m',
"CYAN": '\033[96m',
"RESET": '\033[0m',
}
def _getLocal(name):
if not hasattr(logdata, name):
setattr(logdata, name, {})
return getattr(logdata, name)
def inject_logger_wrapper(ns, name):
org = getattr(ns, name)
def _get_indent():
d = _getLocal('indent')
return d.setdefault(name.upper(), 0)
def _add_indent(n):
d = _getLocal('indent')
d[name.upper()] = min(max(0, _get_indent() + n), MAX_INDENT)
@functools.wraps(org)
def f(self, msg, *args, **kwargs):
indent = _get_indent()
return org(self, ' '*indent + msg, *args, **kwargs)
setattr(ns, name, f)
# utifily functions
def begin_block():
_add_indent(INDENT_WIDTH)
def end_block():
_add_indent(-1*INDENT_WIDTH)
def setcolor(color=None):
if color:
color = COLORS[color]
d = _getLocal('color')
d[name.upper()] = color
g = getattr(logging, name)
f.block = g.block = begin_block
f.endblock = g.endblock = end_block
f.setcolor = g.setcolor = setcolor
inject_logger_wrapper(logging.Logger, 'debug')
inject_logger_wrapper(logging.Logger, 'info')
inject_logger_wrapper(logging.Logger, 'warning')
inject_logger_wrapper(logging.Logger, 'error')
inject_logger_wrapper(logging.Logger, 'critical')
inject_logger_wrapper(logging.Logger, 'exception')
inject_logger_wrapper(logging.Logger, 'log')
def inject_streamhandler_wrapper():
org_format = logging.StreamHandler.format
def get_color(levelname):
d = _getLocal('color')
return d.setdefault(levelname, None)
@functools.wraps(org_format)
def format(self, record):
ret = org_format(self, record)
if hasattr(sys.stdout, 'isatty') and self.stream.isatty():
color = get_color(record.levelname.upper())
if color:
ret = color+ret+'\033[0m'
return ret
logging.StreamHandler.format = format
inject_streamhandler_wrapper() | happylogging/utils.py | import sys
import logging
import cProfile
import functools
import threading
import pprint
logdata = threading.local()
INDENT_WIDTH = 2
MAX_INDENT = 20
COLORS = {
"RED": '\033[91m',
"GREEN": '\033[92m',
"YELLOW": '\033[93m',
"BLUE": '\033[94m',
"MAGENTA": '\033[95m',
"CYAN": '\033[96m',
"RESET": '\033[0m',
}
def _getLocal(name):
if not hasattr(logdata, name):
setattr(logdata, name, {})
return getattr(logdata, name)
def inject_logger_wrapper(ns, name):
org = getattr(ns, name)
def _get_indent():
d = _getLocal('indent')
return d.setdefault(name.upper(), 0)
def _add_indent(n):
d = _getLocal('indent')
d[name.upper()] = min(max(0, _get_indent() + n), MAX_INDENT)
@functools.wraps(org)
def f(self, msg, *args, **kwargs):
indent = _get_indent()
return org(self, ' '*indent + msg, *args, **kwargs)
setattr(ns, name, f)
# utifily functions
def begin_block():
_add_indent(INDENT_WIDTH)
def end_block():
_add_indent(-1*INDENT_WIDTH)
def setcolor(color=None):
if color:
color = COLORS[color]
d = _getLocal('color')
d[name.upper()] = color
g = getattr(logging, name)
f.block = g.block = begin_block
f.endblock = g.endblock = end_block
f.setcolor = g.setcolor = setcolor
inject_logger_wrapper(logging.Logger, 'debug')
inject_logger_wrapper(logging.Logger, 'info')
inject_logger_wrapper(logging.Logger, 'warning')
inject_logger_wrapper(logging.Logger, 'error')
inject_logger_wrapper(logging.Logger, 'critical')
inject_logger_wrapper(logging.Logger, 'exception')
inject_logger_wrapper(logging.Logger, 'log')
def inject_streamhandler_wrapper():
org_format = logging.StreamHandler.format
def get_color(levelname):
d = _getLocal('color')
return d.setdefault(levelname, None)
@functools.wraps(org_format)
def format(self, record):
ret = org_format(self, record)
if hasattr(sys.stdout, 'isatty') and self.stream.isatty():
color = get_color(record.levelname.upper())
if color:
ret = color+ret+'\033[0m'
return ret
logging.StreamHandler.format = format
inject_streamhandler_wrapper() | 0.247987 | 0.074736 |
import subprocess
from . import ControllerException
class WrappedProcess:
def __init__(self, command, args=None):
self._command = command
if args is not None:
if isinstance(args, (list, tuple)):
self._command.extend(args)
else:
self._command.append(args)
self._process = None
def run(self):
if self._process is not None:
return
try:
print("Running ", self._command)
self._process = subprocess.Popen(self._command)
except OSError as e:
raise ControllerException("Could not execute %s" % " ".join(self._command), e)
def terminate(self):
try:
print("Stopping " % self._command)
self._process.terminate()
self.wait()
print("done")
except OSError:
# ignore when the process has already terminated
return 0
def wait(self):
if self._process is None:
return
try:
self._process.wait()
except OSError as e:
pass
finally:
self._process = None
class NoopProcess(object):
def run(self):
pass
def terminate(self):
pass
def wait(self):
pass
class ConfigAction(object):
"""
Base class for actions that configure components.
"""
def __init__(self):
self.active = False
def activate(self):
"""
Activates the configuration.
:throws: ControllerException when there is a problem
:return: None
"""
if self.active:
return
self._activate()
self.active = True
def _activate(self):
pass
def deactivate(self):
"""
Deactivates the configuration.
:throws: ControllerException when there is a problem
:return: None
"""
if not self.active:
return
self._deactivate()
self.active = False
def _deactivate(self):
pass
class CompoundAction(ConfigAction):
"""
Configuration action that invokes a sequence of actions
"""
def __init__(self, actions):
ConfigAction.__init__(self)
self._actions = list(actions)
def _activate(self):
for action in self._actions:
action.activate()
def _deactivate(self):
for action in self._actions:
action.deactivate()
class PersistentProcessConfigAction(ConfigAction):
"""
Action that executes a process on activation, and kills it on process on deactivation.
Arguments taken are the process and its arguments.
"""
def __init__(self, command, args=None):
ConfigAction.__init__(self)
self._process = WrappedProcess(command, args)
def _activate(self):
self._process.run()
def _deactivate(self):
self._process.terminate()
class EntryExitProcessConfigAction(ConfigAction):
_NOOP = NoopProcess()
"""
Action that executes one process on activation, and another one on deactivation.
Arguments taken are the process and its arguments.
"""
def __init__(self, entry_command=None, exit_command=None, entry_args=None, exit_args=None):
ConfigAction.__init__(self)
if entry_command is not None:
self._entry_process = WrappedProcess(entry_command, entry_args)
else:
self._entry_process = self._NOOP
if exit_command is not None:
self._exit_process = WrappedProcess(exit_command, exit_args)
else:
self._exit_process = self._NOOP
def _activate(self):
self._entry_process.run()
self._entry_process.wait()
def _deactivate(self):
self._exit_process.run()
self._exit_process.wait() | mode_manager/src/mode_manager/config_action.py |
import subprocess
from . import ControllerException
class WrappedProcess:
def __init__(self, command, args=None):
self._command = command
if args is not None:
if isinstance(args, (list, tuple)):
self._command.extend(args)
else:
self._command.append(args)
self._process = None
def run(self):
if self._process is not None:
return
try:
print("Running ", self._command)
self._process = subprocess.Popen(self._command)
except OSError as e:
raise ControllerException("Could not execute %s" % " ".join(self._command), e)
def terminate(self):
try:
print("Stopping " % self._command)
self._process.terminate()
self.wait()
print("done")
except OSError:
# ignore when the process has already terminated
return 0
def wait(self):
if self._process is None:
return
try:
self._process.wait()
except OSError as e:
pass
finally:
self._process = None
class NoopProcess(object):
def run(self):
pass
def terminate(self):
pass
def wait(self):
pass
class ConfigAction(object):
"""
Base class for actions that configure components.
"""
def __init__(self):
self.active = False
def activate(self):
"""
Activates the configuration.
:throws: ControllerException when there is a problem
:return: None
"""
if self.active:
return
self._activate()
self.active = True
def _activate(self):
pass
def deactivate(self):
"""
Deactivates the configuration.
:throws: ControllerException when there is a problem
:return: None
"""
if not self.active:
return
self._deactivate()
self.active = False
def _deactivate(self):
pass
class CompoundAction(ConfigAction):
    """
    Configuration action that invokes a sequence of actions.
    """

    def __init__(self, actions):
        ConfigAction.__init__(self)
        # Materialize the iterable once so later mutation of the caller's
        # collection does not change this action.
        self._actions = list(actions)

    def _activate(self):
        # Activate the nested actions in the order given.
        for nested in self._actions:
            nested.activate()

    def _deactivate(self):
        # NOTE(review): deactivation runs in the same order as activation,
        # not reversed — presumably intentional; confirm with callers.
        for nested in self._actions:
            nested.deactivate()
class PersistentProcessConfigAction(ConfigAction):
    """
    Action that executes a process on activation, and kills it on process on deactivation.
    Arguments taken are the process and its arguments.
    """
    def __init__(self, command, args=None):
        ConfigAction.__init__(self)
        # One long-lived child process for the lifetime of the activation.
        self._process = WrappedProcess(command, args)
    def _activate(self):
        # Start the child; it keeps running until deactivation.
        self._process.run()
    def _deactivate(self):
        # Kill the child started by _activate.
        self._process.terminate()
class EntryExitProcessConfigAction(ConfigAction):
    # Shared null-object used when one of the two commands is omitted.
    _NOOP = NoopProcess()
    # NOTE(review): the string below is not a real docstring (it is not the
    # first statement in the class body), so it is invisible to help().
    """
    Action that executes one process on activation, and another one on deactivation.
    Arguments taken are the process and its arguments.
    """
    def __init__(self, entry_command=None, exit_command=None, entry_args=None, exit_args=None):
        ConfigAction.__init__(self)
        # Fall back to the shared no-op process when a command is not given.
        if entry_command is not None:
            self._entry_process = WrappedProcess(entry_command, entry_args)
        else:
            self._entry_process = self._NOOP
        if exit_command is not None:
            self._exit_process = WrappedProcess(exit_command, exit_args)
        else:
            self._exit_process = self._NOOP
    def _activate(self):
        # Run the entry command to completion (blocking).
        self._entry_process.run()
        self._entry_process.wait()
    def _deactivate(self):
        # Run the exit command to completion (blocking).
        self._exit_process.run()
        self._exit_process.wait() | 0.44746 | 0.078008 |
import pickle
import time
import numpy as np
import torch
import os
from datasets.dataset_invivo_sinograms import DatasetInvivoSinograms
from models.network_denoising import DenoisingNet
from torch.utils.tensorboard import SummaryWriter
from set_locals.set_local_experiment_infer import set_local_experiment_infer
from utils.environment_check import environment_check
from utils.get_output_folders import get_output_folders_for_train_val, get_output_folder_for_infer
from medpy.io import save
if __name__ == '__main__':
    # Load the inference settings and the pickled train/val experiment object
    # that was stored alongside the trained weights.
    e_infer = set_local_experiment_infer()
    e_train_val = pickle.load(open(os.path.join(e_infer.path_experiment_train_val_and_weights, 'experiment_train_val.pickle'), 'rb'))
    use_cuda, device, num_workers = environment_check(e_infer.gpu_index_for_inference)
    experiment_base_path, denoised_sinogrmas_path = get_output_folder_for_infer(e_infer.save_path_infer, e_train_val.experiment_name)
    # Define network and load weights
    network = DenoisingNet(e_train_val)
    checkpoint = torch.load(os.path.join(e_infer.path_experiment_train_val_and_weights, 'model_min_val_loss.pt'), map_location=device)
    network.load_state_dict(checkpoint['network_state_dict'])
    network.eval()
    if use_cuda:
        network.cuda()
    # Load test dataset for inference
    params_dataloader_test = {'batch_size': 1, 'shuffle': False, 'num_workers': 3, 'drop_last': False}
    dataloader_test = torch.utils.data.DataLoader(
        dataset=DatasetInvivoSinograms(e_infer.path_noisy_input_sinograms,
                                       e_train_val.divisor_for_data_normalization,
                                       regex_fullmatch_for_filenames=e_infer.regex_fullmatch_for_filenames),
        **params_dataloader_test)
    print('The number of test images = %d' % len(dataloader_test.dataset))
    # NOTE(review): time_infer_start is recorded but never reported.
    time_infer_start = time.time()
    with torch.no_grad():
        for id_batch, (noisy_signal, name_noisy_signal) in enumerate(dataloader_test):
            # --- Forward pass
            output = network(noisy_signal.to(device).float())
            # The network output is subtracted from the input, i.e. the model
            # is used residual-style (it predicts what to remove).
            denoised_signal = noisy_signal - output.cpu()
            # Undo the dataset normalisation before saving to disk.
            noisy_signal = np.squeeze(noisy_signal.numpy()) * e_train_val.divisor_for_data_normalization
            denoised_signal = np.squeeze(denoised_signal.numpy()) * e_train_val.divisor_for_data_normalization
            # Note: Don't save noisy test sinograms because they are not altered by the network
            save(denoised_signal, os.path.join(denoised_sinogrmas_path, name_noisy_signal[0] + '.nii'))
print('Saved denoised sinogram of "%s".' % name_noisy_signal[0]) | infer.py | import pickle
import time
import numpy as np
import torch
import os
from datasets.dataset_invivo_sinograms import DatasetInvivoSinograms
from models.network_denoising import DenoisingNet
from torch.utils.tensorboard import SummaryWriter
from set_locals.set_local_experiment_infer import set_local_experiment_infer
from utils.environment_check import environment_check
from utils.get_output_folders import get_output_folders_for_train_val, get_output_folder_for_infer
from medpy.io import save
if __name__ == '__main__':
e_infer = set_local_experiment_infer()
e_train_val = pickle.load(open(os.path.join(e_infer.path_experiment_train_val_and_weights, 'experiment_train_val.pickle'), 'rb'))
use_cuda, device, num_workers = environment_check(e_infer.gpu_index_for_inference)
experiment_base_path, denoised_sinogrmas_path = get_output_folder_for_infer(e_infer.save_path_infer, e_train_val.experiment_name)
# Define network and load weights
network = DenoisingNet(e_train_val)
checkpoint = torch.load(os.path.join(e_infer.path_experiment_train_val_and_weights, 'model_min_val_loss.pt'), map_location=device)
network.load_state_dict(checkpoint['network_state_dict'])
network.eval()
if use_cuda:
network.cuda()
# Load test dataset for inference
params_dataloader_test = {'batch_size': 1, 'shuffle': False, 'num_workers': 3, 'drop_last': False}
dataloader_test = torch.utils.data.DataLoader(
dataset=DatasetInvivoSinograms(e_infer.path_noisy_input_sinograms,
e_train_val.divisor_for_data_normalization,
regex_fullmatch_for_filenames=e_infer.regex_fullmatch_for_filenames),
**params_dataloader_test)
print('The number of test images = %d' % len(dataloader_test.dataset))
time_infer_start = time.time()
with torch.no_grad():
for id_batch, (noisy_signal, name_noisy_signal) in enumerate(dataloader_test):
# --- Forward pass
output = network(noisy_signal.to(device).float())
denoised_signal = noisy_signal - output.cpu()
noisy_signal = np.squeeze(noisy_signal.numpy()) * e_train_val.divisor_for_data_normalization
denoised_signal = np.squeeze(denoised_signal.numpy()) * e_train_val.divisor_for_data_normalization
# Note: Don't save noisy test sinograms because they are not altered by the network
save(denoised_signal, os.path.join(denoised_sinogrmas_path, name_noisy_signal[0] + '.nii'))
print('Saved denoised sinogram of "%s".' % name_noisy_signal[0]) | 0.769254 | 0.26182 |
from typing import Callable, Generic, Optional
from fpylib.functors.applicative import Applicative
from fpylib.functors.functor import _S, _T
from fpylib.functors.monad import Monad, unit
class Maybe(Applicative, Monad, Generic[_T]):
    """Haskell-style ``Maybe``: a functor, applicative and monad over an
    optional value."""

    def unit(self, value: _T) -> "Maybe[_T]":
        """Lift *value* into the monad.

        :param value: The value to be checked.
        :type value: T
        :return: ``Just(value)`` when *value* is not None, else ``Nothing()``.
        """
        if value is None:
            return Nothing()
        return Just(value)

    def bind(self, func: Callable[[_T], _S]) -> "Maybe[_S]":
        """Apply *func* to the wrapped value, capturing failures.

        :param func: The function to be applied.
        :type func: Callable[[T], S]
        :return: ``Just`` of the result, or ``Nothing`` carrying the error
            when *func* raises or returns None.
        """
        try:
            result = func(self.get())
            if result is None:
                return Nothing(ValueError("The value is None"))
            return Just(result)
        except Exception as exc:
            return Nothing(exc)
class Just(Maybe):
    """A ``Maybe`` holding a present value."""

    def __str__(self) -> str:
        return "Just {}".format(self.get())

    def __repr__(self) -> str:
        return "Just {}".format(type(self.get()))
class Nothing(Maybe):
    """A ``Maybe`` holding no value, optionally carrying the exception(s)
    that caused the failure."""

    def __init__(self, *failure: Optional[Exception]) -> None:
        """Store the non-None failure causes.

        BUG FIX: the original kept a lazy ``filter`` object, which (a) is
        always truthy — so ``fails()`` could never report "no failure" —
        and (b) was exhausted by the first ``__repr__`` call.  A tuple is
        stored instead.
        """
        object.__setattr__(
            self,
            "_Nothing__failure",
            tuple(fail for fail in failure if fail is not None),
        )

    def fails(self) -> bool:
        # Now actually returns a bool, as the annotation always promised.
        return bool(self.__failure)

    def __str__(self) -> str:
        return "Nothing"

    def __repr__(self) -> str:
        # Safe to render repeatedly now that __failure is a tuple.
        return f"{self.__str__()} {list(self.__failure)}"
def maybe_conditioner(func: Callable[..., _T]) -> Callable[..., "Maybe[_T]"]:
    """
    Conditioner for Maybe.
    :param func: The function to wrap in a Monad.
    :type func: Callable[..., T]
    :return: The wrapped function.
    :rtype: Callable[..., Monad[T]]
    """
    def wrapper(*arg, **kwargs) -> "Maybe[_T]":
        # Success: lift the result into Maybe via unit (Just / Nothing);
        # any exception: return Nothing carrying that exception.
        try:
            return unit(Maybe, func(*arg, **kwargs))
        except Exception as e:
            return Nothing(e)
    return wrapper | fpylib/functors/maybe.py | from typing import Callable, Generic, Optional
from fpylib.functors.applicative import Applicative
from fpylib.functors.functor import _S, _T
from fpylib.functors.monad import Monad, unit
class Maybe(Applicative, Monad, Generic[_T]):
"""
This is a implementation of the Maybe Monad of Haskell. It is a functor, applicative and monad.
"""
def unit(self, value: _T) -> "Maybe[_T]":
"""
Return a Just pr Nothing value based on if the value is None or not.
:param value: The value to be checked.
:type value: T
:return: Just value or Nothing
"""
return Just(value) if value is not None else Nothing()
def bind(self, func: Callable[[_T], _S]) -> "Maybe[_S]":
"""
Return a Just pr Nothing value based on if occur an error or not.
:param func: The function to be applied.
:type func: Callable[[T], S]
:return: Just value or Nothing
"""
try:
value = func(self.get())
if value is None:
return Nothing(ValueError("The value is None"))
return Just(value)
except Exception as e:
return Nothing(e)
class Just(Maybe):
def __str__(self) -> str:
return f"Just {self.get()}"
def __repr__(self) -> str:
return f"Just {type(self.get())}"
class Nothing(Maybe):
def __init__(self, *failure: Optional[Exception]) -> None:
"""
This does nothing.
"""
object.__setattr__(
self, "_Nothing__failure", filter(lambda fail: fail is not None, failure)
)
def fails(self) -> bool:
return self.__failure
def __str__(self) -> str:
return "Nothing"
def __repr__(self) -> str:
return f"{self.__str__()} {list(self.__failure)}"
def maybe_conditioner(func: Callable[..., _T]) -> Callable[..., "Maybe[_T]"]:
"""
Conditioner for Maybe.
:param func: The function to wrap in a Monad.
:type func: Callable[..., T]
:return: The wrapped function.
:rtype: Callable[..., Monad[T]]
"""
def wrapper(*arg, **kwargs) -> "Maybe[_T]":
try:
return unit(Maybe, func(*arg, **kwargs))
except Exception as e:
return Nothing(e)
return wrapper | 0.949004 | 0.336386 |
import os
import sys
import torch
import torchvision
import torch.utils.data
import numpy as np
from pprint import pprint
from itertools import combinations
from torch import nn, optim
from torch.nn import functional as F
from torch.distributions import Bernoulli, RelaxedBernoulli
from torchvision import datasets, models, transforms
SMOOTH = 1e-6
class SuperMask(nn.Module):
    def __init__(self, domain_list, act_size, init_setting="random", init_scalar=1):
        """Per-domain learnable mask logits over an activation of size *act_size*.

        :param domain_list: iterable of domain names; one logit vector each.
        :param act_size: number of mask entries (one per activation unit).
        :param init_setting: "random_uniform" -> U[0, 1) init;
            "scalar" -> constant *init_scalar* init.
            NOTE(review): the default "random" matches neither branch,
            leaving ``super_mask_logits`` undefined — confirm callers always
            pass a valid setting.
        :param init_scalar: constant used by the "scalar" initialisation.
        """
        super(SuperMask, self).__init__()
        self.domain_list = domain_list
        self.act_size = act_size
        self.init_setting = init_setting
        self.init_scalar = init_scalar
        # Define the super mask logits
        if self.init_setting == "random_uniform":
            # One independent, uniformly-initialised logit vector per domain.
            self.super_mask_logits = nn.ParameterDict(
                {
                    x: nn.Parameter(torch.rand(self.act_size, requires_grad=True))
                    for x in self.domain_list
                }
            )
        elif self.init_setting == "scalar":
            # All logits start from the same constant value.
            param_tensor = torch.ones(self.act_size, requires_grad=True)
            param_tensor = param_tensor.new_tensor(
                [self.init_scalar] * self.act_size, requires_grad=True
            )
            self.super_mask_logits = nn.ParameterDict(
                {x: nn.Parameter(param_tensor.clone()) for x in self.domain_list}
            )
    def forward(self, activation, domain, mode="sample", conv_mode=False):
        """Mask *activation* with each sample's domain mask.

        :param activation: tensor to mask; treated as 4-D (N, C, H, W) when
            ``conv_mode`` is set and it has more than 2 dims.
        :param domain: sequence of domain names, one per batch element.
        :param mode: "sample" | "greedy" | "softscale" | "avg_mask_softscale".
        :param conv_mode: broadcast the per-channel mask over spatial dims.
        :return: tuple ``(masked_activation, mask, soft_mask)``.
        """
        # Mask repeated along channel dimensions if conv_mode == True
        # Soft mask probabilities: sigmoid of each sample's domain logits.
        probs = [nn.Sigmoid()(self.super_mask_logits[x]) for x in domain]
        probs = torch.stack(probs)
        if mode == "sample":
            # Stochastic binary mask.  The (hard - soft).detach() + soft form
            # is the straight-through estimator: the forward pass uses the
            # hard sample, gradients flow through the soft probabilities.
            mask_dist = Bernoulli(probs)
            hard_mask = mask_dist.sample()
            soft_mask = probs
            mask = (hard_mask - soft_mask).detach() + soft_mask
            if conv_mode and len(activation.shape) > 2:
                # Broadcast the (N, C) mask over the spatial dimensions.
                apply_mask = mask.view(mask.shape[0], mask.shape[1], 1, 1)
                apply_mask = apply_mask.repeat(
                    1, 1, activation.shape[2], activation.shape[3]
                )
                activation = apply_mask * activation
            else:
                activation = mask * activation
        elif mode == "greedy":
            # Deterministic mask: threshold the probabilities at 0.5
            # (still straight-through for gradients).
            hard_mask = (probs > 0.5).float()
            soft_mask = probs
            mask = (hard_mask - soft_mask).detach() + soft_mask
            if conv_mode and len(activation.shape) > 2:
                apply_mask = mask.view(mask.shape[0], mask.shape[1], 1, 1)
                apply_mask = apply_mask.repeat(
                    1, 1, activation.shape[2], activation.shape[3]
                )
                activation = apply_mask * activation
            else:
                activation = mask * activation
        elif mode == "softscale":
            # Scale activations by the soft probabilities; the hard mask is
            # only returned for inspection, not applied.
            hard_mask = (probs > 0.5).float()
            soft_mask = probs
            mask = hard_mask
            if conv_mode and len(activation.shape) > 2:
                apply_mask = soft_mask.view(
                    soft_mask.shape[0], soft_mask.shape[1], 1, 1
                )
                apply_mask = apply_mask.repeat(
                    1, 1, activation.shape[2], activation.shape[3]
                )
                activation = apply_mask * activation
            else:
                activation = soft_mask * activation
        elif mode == "avg_mask_softscale":
            # Average all the source domain masks
            # instead of combining them
            all_probs = [
                nn.Sigmoid()(self.super_mask_logits[x]) for x in self.domain_list
            ]
            all_probs = torch.mean(torch.stack(all_probs), 0)
            # Every sample receives the same averaged soft mask.
            mean_mask = [all_probs for x in domain]
            mean_mask = torch.stack(mean_mask)
            soft_mask = mean_mask
            hard_mask = (mean_mask > 0.5).float()
            mask = hard_mask
            if conv_mode and len(activation.shape) > 2:
                apply_mask = soft_mask.view(
                    soft_mask.shape[0], soft_mask.shape[1], 1, 1
                )
                apply_mask = apply_mask.repeat(
                    1, 1, activation.shape[2], activation.shape[3]
                )
                activation = apply_mask * activation
            else:
                activation = soft_mask * activation
        # NOTE(review): an unrecognised mode falls through to this return and
        # raises NameError on `mask` — modes are assumed validated upstream.
        return (activation, mask, soft_mask)
def sparsity(self, mask):
return torch.mean(mask, dim=1)
def sparsity_penalty(self):
sparse_pen = 0
for _, v in self.super_mask_logits.items():
sparse_pen += torch.sum(nn.Sigmoid()(v))
return sparse_pen
def overlap_penalty(self):
overlap_pen = 0
domain_pairs = list(combinations(self.domain_list, 2))
for pair in domain_pairs:
dom1, dom2 = pair
mask1 = nn.Sigmoid()(self.super_mask_logits[dom1])
mask2 = nn.Sigmoid()(self.super_mask_logits[dom2])
intersection = torch.sum(mask1 * mask2)
union = torch.sum(mask1 + mask2 - mask1 * mask2)
iou = (intersection + SMOOTH) / (union + SMOOTH)
overlap_pen += iou
overlap_pen /= len(domain_pairs)
return overlap_pen
    def mask_overlap(self, layer_name=""):
        """Diagnostic: hard-mask IoU for every pair of domains.

        :param layer_name: optional prefix for the keys of the returned dict.
        :return: dict mapping "<dom_a>, <dom_b> IoU-Ov" -> float, plus an
            "overall IoU-Ov" entry holding the mean over all pairs.
        """
        if layer_name != "":
            prefix = layer_name + " : "
        else:
            prefix = ""
        domain_pairs = combinations(self.domain_list, 2)
        iou_overlap_dict = {}
        for pair in domain_pairs:
            mask_0 = nn.Sigmoid()(self.super_mask_logits[pair[0]])
            mask_1 = nn.Sigmoid()(self.super_mask_logits[pair[1]])
            # Binarise each mask at 0.5 before measuring exact overlap.
            mask_0 = mask_0 > 0.5
            mask_1 = mask_1 > 0.5
            intersection = (mask_0 & mask_1).float().sum()
            union = (mask_0 | mask_1).float().sum()
            # SMOOTH keeps the ratio defined when both masks are empty.
            iou = (intersection + SMOOTH) / (union + SMOOTH)
            iou_overlap_dict[
                prefix + pair[0] + ", " + pair[1] + " IoU-Ov"
            ] = iou.data.item()
        iou_overlap_dict[prefix + "overall IoU-Ov"] = np.mean(
            [x for x in list(iou_overlap_dict.values())]
        )
        return iou_overlap_dict
    @classmethod
    def from_config(cls, config, act_size):
        """Alternate constructor: build a SuperMask from a config node.

        ``_C.DATA.DOMAIN_LIST`` may be either a list of names or a single
        comma-separated string (split below).
        """
        _C = config
        domains = _C.DATA.DOMAIN_LIST
        if "," in domains:
            # `in` on a list checks elements, so the split only triggers when
            # DOMAIN_LIST is a string containing a comma.
            domains = _C.DATA.DOMAIN_LIST.split(",")
        return cls(
            domains, act_size, _C.MODEL.MASK_INIT_SETTING, _C.MODEL.MASK_INIT_SCALAR
        ) | models/supermasks.py | import os
import sys
import torch
import torchvision
import torch.utils.data
import numpy as np
from pprint import pprint
from itertools import combinations
from torch import nn, optim
from torch.nn import functional as F
from torch.distributions import Bernoulli, RelaxedBernoulli
from torchvision import datasets, models, transforms
SMOOTH = 1e-6
class SuperMask(nn.Module):
def __init__(self, domain_list, act_size, init_setting="random", init_scalar=1):
super(SuperMask, self).__init__()
self.domain_list = domain_list
self.act_size = act_size
self.init_setting = init_setting
self.init_scalar = init_scalar
# Define the super mask logits
if self.init_setting == "random_uniform":
self.super_mask_logits = nn.ParameterDict(
{
x: nn.Parameter(torch.rand(self.act_size, requires_grad=True))
for x in self.domain_list
}
)
elif self.init_setting == "scalar":
param_tensor = torch.ones(self.act_size, requires_grad=True)
param_tensor = param_tensor.new_tensor(
[self.init_scalar] * self.act_size, requires_grad=True
)
self.super_mask_logits = nn.ParameterDict(
{x: nn.Parameter(param_tensor.clone()) for x in self.domain_list}
)
def forward(self, activation, domain, mode="sample", conv_mode=False):
# Mask repeated along channel dimensions if conv_mode == True
probs = [nn.Sigmoid()(self.super_mask_logits[x]) for x in domain]
probs = torch.stack(probs)
if mode == "sample":
mask_dist = Bernoulli(probs)
hard_mask = mask_dist.sample()
soft_mask = probs
mask = (hard_mask - soft_mask).detach() + soft_mask
if conv_mode and len(activation.shape) > 2:
apply_mask = mask.view(mask.shape[0], mask.shape[1], 1, 1)
apply_mask = apply_mask.repeat(
1, 1, activation.shape[2], activation.shape[3]
)
activation = apply_mask * activation
else:
activation = mask * activation
elif mode == "greedy":
hard_mask = (probs > 0.5).float()
soft_mask = probs
mask = (hard_mask - soft_mask).detach() + soft_mask
if conv_mode and len(activation.shape) > 2:
apply_mask = mask.view(mask.shape[0], mask.shape[1], 1, 1)
apply_mask = apply_mask.repeat(
1, 1, activation.shape[2], activation.shape[3]
)
activation = apply_mask * activation
else:
activation = mask * activation
elif mode == "softscale":
hard_mask = (probs > 0.5).float()
soft_mask = probs
mask = hard_mask
if conv_mode and len(activation.shape) > 2:
apply_mask = soft_mask.view(
soft_mask.shape[0], soft_mask.shape[1], 1, 1
)
apply_mask = apply_mask.repeat(
1, 1, activation.shape[2], activation.shape[3]
)
activation = apply_mask * activation
else:
activation = soft_mask * activation
elif mode == "avg_mask_softscale":
# Average all the source domain masks
# instead of combining them
all_probs = [
nn.Sigmoid()(self.super_mask_logits[x]) for x in self.domain_list
]
all_probs = torch.mean(torch.stack(all_probs), 0)
mean_mask = [all_probs for x in domain]
mean_mask = torch.stack(mean_mask)
soft_mask = mean_mask
hard_mask = (mean_mask > 0.5).float()
mask = hard_mask
if conv_mode and len(activation.shape) > 2:
apply_mask = soft_mask.view(
soft_mask.shape[0], soft_mask.shape[1], 1, 1
)
apply_mask = apply_mask.repeat(
1, 1, activation.shape[2], activation.shape[3]
)
activation = apply_mask * activation
else:
activation = soft_mask * activation
return (activation, mask, soft_mask)
def sparsity(self, mask):
return torch.mean(mask, dim=1)
def sparsity_penalty(self):
sparse_pen = 0
for _, v in self.super_mask_logits.items():
sparse_pen += torch.sum(nn.Sigmoid()(v))
return sparse_pen
def overlap_penalty(self):
overlap_pen = 0
domain_pairs = list(combinations(self.domain_list, 2))
for pair in domain_pairs:
dom1, dom2 = pair
mask1 = nn.Sigmoid()(self.super_mask_logits[dom1])
mask2 = nn.Sigmoid()(self.super_mask_logits[dom2])
intersection = torch.sum(mask1 * mask2)
union = torch.sum(mask1 + mask2 - mask1 * mask2)
iou = (intersection + SMOOTH) / (union + SMOOTH)
overlap_pen += iou
overlap_pen /= len(domain_pairs)
return overlap_pen
def mask_overlap(self, layer_name=""):
if layer_name != "":
prefix = layer_name + " : "
else:
prefix = ""
domain_pairs = combinations(self.domain_list, 2)
iou_overlap_dict = {}
for pair in domain_pairs:
mask_0 = nn.Sigmoid()(self.super_mask_logits[pair[0]])
mask_1 = nn.Sigmoid()(self.super_mask_logits[pair[1]])
mask_0 = mask_0 > 0.5
mask_1 = mask_1 > 0.5
intersection = (mask_0 & mask_1).float().sum()
union = (mask_0 | mask_1).float().sum()
iou = (intersection + SMOOTH) / (union + SMOOTH)
iou_overlap_dict[
prefix + pair[0] + ", " + pair[1] + " IoU-Ov"
] = iou.data.item()
iou_overlap_dict[prefix + "overall IoU-Ov"] = np.mean(
[x for x in list(iou_overlap_dict.values())]
)
return iou_overlap_dict
@classmethod
def from_config(cls, config, act_size):
_C = config
domains = _C.DATA.DOMAIN_LIST
if "," in domains:
domains = _C.DATA.DOMAIN_LIST.split(",")
return cls(
domains, act_size, _C.MODEL.MASK_INIT_SETTING, _C.MODEL.MASK_INIT_SCALAR
) | 0.712532 | 0.38659 |
import logging
import json
import sys
import csv
from inspect import isclass
from stdnet.utils import StringIO
from .globals import get_model_from_hash
__all__ = ['get_serializer',
'register_serializer',
'unregister_serializer',
'all_serializers',
'Serializer',
'JsonSerializer']
LOGGER = logging.getLogger('stdnet.odm')
_serializers = {}
if sys.version_info < (2, 7):  # pragma: no cover
    def writeheader(dw):
        # Python 2.6's DictWriter lacks writeheader(): emit the header row
        # manually by writing each field name under its own column.
        # (No dict comprehension here — that is 2.7+ syntax.)
        dw.writerow(dict((name, name) for name in dw.fieldnames))
else:
    def writeheader(dw):
        """Delegate to ``csv.DictWriter.writeheader`` (Python >= 2.7)."""
        dw.writeheader()
def get_serializer(name, **options):
    '''Retrieve a serializer register as *name*. If the serializer is not
available a ``ValueError`` exception will raise.

A common usage pattern::

    qs = MyModel.objects.query().sort_by('id')
    s = odm.get_serializer('json')
    s.dump(qs)
'''
    if name not in _serializers:
        raise ValueError('Unknown serializer {0}.'.format(name))
    # Registered entries are classes; instantiate with the caller's options.
    return _serializers[name](**options)
def register_serializer(name, serializer):
    '''\
Register a new serializer to the library.

:parameter name: serializer name (it can override existing serializers).
:parameter serializer: an instance or a derived class of a
    :class:`stdnet.odm.Serializer` class or a callable.
'''
    # The registry stores classes: normalise an instance to its class.
    cls = serializer if isclass(serializer) else serializer.__class__
    _serializers[name] = cls
def unregister_serializer(name):
    """Remove and return the serializer registered as *name* (None if absent)."""
    return _serializers.pop(name, None)


def all_serializers():
    """Return the sorted list of registered serializer names."""
    return sorted(_serializers)
class Serializer(object):
    '''The stdnet serializer base class. During initialization, the *options*
dictionary is used to override the :attr:`default_options`. These are specific
to each :class:`Serializer` implementation.

.. attribute:: default_options

    Dictionary of default options which are overwritten during initialisation.
    By default it is an empty dictionary.

.. attribute:: options

    Dictionary of options.
'''
    default_options = {}
    arguments = ()

    def __init__(self, **options):
        # Start from the class defaults, then overlay only the recognised
        # keyword arguments (those listed in ``arguments``).
        merged = dict(self.default_options)
        for key in options:
            if key in self.arguments:
                merged[key] = options[key]
        self.options = merged

    @property
    def data(self):
        '''List of data to dump into a stream.'''
        if not hasattr(self, '_data'):
            self._data = []
        return self._data

    def dump(self, qs):
        '''Add a :class:`Query` ``qs`` into the collection of :attr:`data`
to dump into a stream. No writing is done until the :meth:`write` method.'''
        raise NotImplementedError

    def write(self, stream=None):
        '''Write the serialized data into a stream. If *stream* is not
provided, a python ``StringIO`` is used.

:return: the stream object.'''
        raise NotImplementedError

    def load(self, models, stream, model=None):
        '''Load a stream of data into the database.
This method must be implemented by subclasses.

:param models: the :class:`Router` which must contains all the model this
    method will load.
:param stream: bytes or an object with a ``read`` method returning bytes.
:param model: Optional :class:`StdModel` we need to load. If not provided all
    models in ``stream`` are loaded.
'''
        raise NotImplementedError
class JsonSerializer(Serializer):
    '''The default :class:`Serializer` of :mod:`stdnet`. It
serialise/unserialise models into json data. It has one option given
by the *indent* of the ``json`` string for pretty serialisation.'''
    arguments = ('indent',)

    def get_data(self, qs):
        """Serialise queryset *qs* into a dict of model name, hash and rows.

        Returns None for an empty queryset.
        """
        data = []
        meta = None
        for obj in qs:
            data.append(obj.tojson())
            meta = obj._meta
        if data:
            return {'model': str(meta),
                    'hash': meta.hash,
                    'data': data}

    def dump(self, qs):
        """Queue the serialised form of *qs* for a later :meth:`write`."""
        data = self.get_data(qs)
        if data:
            self.data.append(data)

    def write(self, stream=None):
        """Write the queued data as JSON to *stream* and return the stream.

        BUG FIX: the original called ``json.dumps(self.data, stream, ...)``;
        ``dumps`` has no stream parameter, so the stream was silently passed
        as the positional ``skipkeys`` flag (truthy -> keys were skipped).
        """
        stream = stream or StringIO()
        stream.write(json.dumps(self.data, **self.options))
        return stream

    def load(self, models, stream, model=None):
        """Load JSON-serialised models from *stream* into the database.

        BUG FIX: the original passed ``**self.options`` to ``json.loads``,
        which raises TypeError as soon as the write-only ``indent`` option
        is set.
        """
        if hasattr(stream, 'read'):
            stream = stream.read()
        data = json.loads(stream)
        for model_data in data:
            model = get_model_from_hash(model_data['hash'])
            if model:
                # Give subclasses a chance to veto or swap the model.
                model = self.on_load_model(model, model_data)
            if model:
                manager = models[model]
                LOGGER.info('Loading model %s', model._meta)
                session = manager.session()
                # One transaction per model; commit signals suppressed.
                with session.begin(signal_commit=False) as t:
                    for item_data in model_data['data']:
                        t.add(model.from_base64_data(**item_data))
            else:
                LOGGER.error('Could not load model %s',
                             model_data.get('model'))
        self.on_finished_load()

    def on_load_model(self, model, model_data):
        '''Callback when a *model* is about to be loaded. If it returns the
model, the model will get loaded otherwise it will skip the loading.'''
        return model

    def on_finished_load(self):
        '''Callback when loading of data is finished'''
        pass
class CsvSerializer(Serializer):
    '''A csv serializer for single model. It serialize/unserialize a model
query into a csv file.'''
    default_options = {'lineterminator': '\n'}

    def dump(self, qs):
        """Serialise the single-model queryset *qs* for a later write.

        :raises ValueError: when called more than once (CSV holds one model).
        """
        if self.data:
            raise ValueError('Cannot serialize more than one model into CSV')
        fields = None
        rows = []
        meta = None
        for obj in qs:
            js = obj.tojson()
            # Accumulate the union of field names across all rows.
            if fields is None:
                fields = set(js)
            else:
                fields.update(js)
            rows.append(js)
            meta = obj._meta
        # BUG FIX: on an empty queryset the original fell through and raised
        # NameError on the unbound loop variable ``meta``.
        if meta is None:
            return
        # Primary key first, then the model's scalar fields that appeared.
        ordered_fields = [meta.pkname()]
        ordered_fields.extend(f.name for f in meta.scalarfields
                              if f.name in fields)
        self.data.append({'fieldnames': ordered_fields,
                          'hash': meta.hash,
                          'data': rows})

    def write(self, stream=None):
        """Write the queued rows as CSV (header + data rows); return the stream."""
        stream = stream or StringIO()
        if self.data:
            fieldnames = self.data[0]['fieldnames']
            rows = self.data[0]['data']
            if rows:
                w = csv.DictWriter(stream, fieldnames, **self.options)
                writeheader(w)
                for row in rows:
                    w.writerow(row)
        return stream

    def load(self, models, stream, model=None):
        """Load CSV rows from *stream* into *model* in one transaction.

        :raises ValueError: when *model* is missing (CSV has no model info).
        """
        if not model:
            raise ValueError('Model is required when loading from csv file')
        r = csv.DictReader(stream, **self.options)
        with models.session().begin() as t:
            for item_data in r:
                t.add(model.from_base64_data(**item_data))
        return t.on_result
register_serializer('json', JsonSerializer)
register_serializer('csv', CsvSerializer) | stdnet/odm/utils.py | import logging
import json
import sys
import csv
from inspect import isclass
from stdnet.utils import StringIO
from .globals import get_model_from_hash
__all__ = ['get_serializer',
'register_serializer',
'unregister_serializer',
'all_serializers',
'Serializer',
'JsonSerializer']
LOGGER = logging.getLogger('stdnet.odm')
_serializers = {}
if sys.version_info < (2, 7): # pragma: no cover
def writeheader(dw):
# hack to handle writeheader in python 2.6
dw.writerow(dict(((k, k) for k in dw.fieldnames)))
else:
def writeheader(dw):
dw.writeheader()
def get_serializer(name, **options):
'''Retrieve a serializer register as *name*. If the serializer is not
available a ``ValueError`` exception will raise.
A common usage pattern::
qs = MyModel.objects.query().sort_by('id')
s = odm.get_serializer('json')
s.dump(qs)
'''
if name in _serializers:
serializer = _serializers[name]
return serializer(**options)
else:
raise ValueError('Unknown serializer {0}.'.format(name))
def register_serializer(name, serializer):
'''\
Register a new serializer to the library.
:parameter name: serializer name (it can override existing serializers).
:parameter serializer: an instance or a derived class of a
:class:`stdnet.odm.Serializer` class or a callable.
'''
if not isclass(serializer):
serializer = serializer.__class__
_serializers[name] = serializer
def unregister_serializer(name):
return _serializers.pop(name, None)
def all_serializers():
return sorted(_serializers)
class Serializer(object):
'''The stdnet serializer base class. During initialization, the *options*
dictionary is used to override the :attr:`default_options`. These are specific
to each :class:`Serializer` implementation.
.. attribute:: default_options
Dictionary of default options which are overwritten during initialisation.
By default it is an empty dictionary.
.. attribute:: options
Dictionary of options.
'''
default_options = {}
arguments = ()
def __init__(self, **options):
opts = self.default_options.copy()
opts.update(((v, options[v]) for v in options if v in self.arguments))
self.options = opts
@property
def data(self):
'''CList of data to dump into a stream.'''
if not hasattr(self, '_data'):
self._data = []
return self._data
def dump(self, qs):
'''Add a :class:`Query` ``qs`` into the collection of :attr:`data`
to dump into a stream. No writing is done until the :meth:`write` method.'''
raise NotImplementedError
def write(self, stream=None):
'''Write the serialized data into a stream. If *stream* is not
provided, a python ``StringIO`` is used.
:return: the stream object.'''
raise NotImplementedError
def load(self, models, stream, model=None):
'''Load a stream of data into the database.
:param models: the :class:`Router` which must contains all the model this
method will load.
:param stream: bytes or an object with a ``read`` method returning bytes.
:param model: Optional :class:`StdModel` we need to load. If not provided all
models in ``stream`` are loaded.
This method must be implemented by subclasses.
'''
raise NotImplementedError
class JsonSerializer(Serializer):
'''The default :class:`Serializer` of :mod:`stdnet`. It
serialise/unserialise models into json data. It has one option given
by the *indent* of the ``json`` string for pretty serialisation.'''
arguments = ('indent',)
def get_data(self, qs):
data = []
for obj in qs:
data.append(obj.tojson())
meta = obj._meta
if data:
return {'model': str(meta),
'hash': meta.hash,
'data': data}
def dump(self, qs):
data = self.get_data(qs)
if data:
self.data.append(data)
def write(self, stream=None):
stream = stream or StringIO()
line = json.dumps(self.data, stream, **self.options)
stream.write(line)
return stream
def load(self, models, stream, model=None):
if hasattr(stream, 'read'):
stream = stream.read()
data = json.loads(stream, **self.options)
for model_data in data:
model = get_model_from_hash(model_data['hash'])
if model:
model = self.on_load_model(model, model_data)
if model:
manager = models[model]
LOGGER.info('Loading model %s', model._meta)
session = manager.session()
with session.begin(signal_commit=False) as t:
for item_data in model_data['data']:
t.add(model.from_base64_data(**item_data))
else:
LOGGER.error('Could not load model %s',
model_data.get('model'))
self.on_finished_load()
def on_load_model(self, model, model_data):
'''Callback when a *model* is about to be loaded. If it returns the
model, the model will get loaded otherwise it will skip the loading.'''
return model
def on_finished_load(self):
'''Callback when loading of data is finished'''
pass
class CsvSerializer(Serializer):
'''A csv serializer for single model. It serialize/unserialize a model
query into a csv file.'''
default_options = {'lineterminator': '\n'}
def dump(self, qs):
if self.data:
raise ValueError('Cannot serialize more than one model into CSV')
fields = None
data = []
for obj in qs:
js = obj.tojson()
if fields is None:
fields = set(js)
else:
fields.update(js)
data.append(js)
meta = obj._meta
ordered_fields = [meta.pkname()]
ordered_fields.extend((f.name for f in meta.scalarfields
if f.name in fields))
data = {'fieldnames': ordered_fields,
'hash': meta.hash,
'data': data}
self.data.append(data)
def write(self, stream=None):
stream = stream or StringIO()
if self.data:
fieldnames = self.data[0]['fieldnames']
data = self.data[0]['data']
if data:
w = csv.DictWriter(stream, fieldnames, **self.options)
writeheader(w)
for row in data:
w.writerow(row)
return stream
def load(self, models, stream, model=None):
if not model:
raise ValueError('Model is required when loading from csv file')
r = csv.DictReader(stream, **self.options)
with models.session().begin() as t:
for item_data in r:
t.add(model.from_base64_data(**item_data))
return t.on_result
register_serializer('json', JsonSerializer)
register_serializer('csv', CsvSerializer) | 0.529993 | 0.119948 |
import os
import sys
import requests
import argparse
from lxml import etree
URL_BASE = 'https://www.reddit.com'


def get_arg():
    """Parse the command line; return the chosen subreddit or None."""
    parser = argparse.ArgumentParser(description='Change your wallpaper by the last one posted in reddit')
    parser.add_argument('--sub', dest='subreddit', type=str, help='type an subreddit')
    args = parser.parse_args()
    return args.subreddit


def get_url(path):
    """Join *path* onto the reddit base URL.

    BUG FIX: the parameter was named ``str``, shadowing the builtin;
    all callers pass it positionally, so the rename is safe.
    """
    return '{}/{}'.format(URL_BASE, path)
def connect(site):
    """Fetch *site* with a browser User-Agent and return the parsed lxml tree.

    On a request failure the fetch is retried once without certificate
    verification (the original retried on a bare ``except:``, which also
    swallowed KeyboardInterrupt).

    :raises RuntimeError: on a non-200 HTTP response.
    """
    headers = {'User-Agent':'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'}
    try:
        html = requests.get(site, headers=headers)
    except requests.exceptions.RequestException:
        # Retry without TLS verification, mirroring the original fallback.
        html = requests.get(site, headers=headers, verify=False)
    # BUG FIX: the original used ``is not 200`` — identity, not equality —
    # which is implementation-dependent for ints (and a SyntaxWarning).
    if html.status_code != 200:
        # RuntimeError instead of BaseException: callers catching broadly
        # still see it, but KeyboardInterrupt/SystemExit semantics survive.
        raise RuntimeError('Error {}. Invalid Subreddit'.format(html.status_code))
    page = html.text
    tree = etree.HTML(page)
    return tree
def sizeof_fmt(num, suffix='B'):
    """Render *num* bytes as a human-readable string with binary prefixes."""
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(num) < 1024.0:
            return "%3.3f%s%s" % (num, unit, suffix)
        num /= 1024.0
    # Anything >= 1024**8 falls through to the yobi prefix.
    return "%.3f%s%s" % (num, 'Yi', suffix)
def download(url):
    """Download *url* into ./wallpaper/ (with a progress bar) and set it as the
    GNOME desktop background. Skips the download when the file already exists.
    """
    name = url.split('/')[-1]
    folder = 'wallpaper'
    img_path = os.path.join(folder, name)
    if not os.path.exists(folder):
        os.makedirs(folder)
    if not os.path.exists(img_path):
        print('Downloading {}'.format(name))
        with open(img_path, 'wb') as f:
            result = requests.get(url, stream=True)
            total_length = result.headers.get('content-length')
            dl = 0
            # content-length may be missing (chunked responses).
            total_length = int(total_length) if total_length else None
            for data in result.iter_content(chunk_size=(1024)):
                dl += len(data)
                f.write(data)
                if total_length:
                    done = int(50 * dl / total_length)
                    sys.stdout.write("\r[%s%s] (%s/%s) " % (
                        '=' * done, ' ' * (50 - done),
                        sizeof_fmt(dl), sizeof_fmt(total_length)))
                    sys.stdout.flush()
                else:
                    sys.stdout.write("\rDownloaded %s so far... " % sizeof_fmt(dl))
        # NOTE(review): path is not shell-quoted — breaks on names with spaces.
        os.system("gsettings set org.gnome.desktop.background picture-uri file:{}".format(os.path.abspath(img_path)))
        # os.system("feh --bg-fill {}".format(os.path.abspath(img_path)))
    else:
        print("Wallpapers is up to date")
def crawler(arg, start=0):
    """Find the newest image post in r/<arg> and download it.

    When the first candidate is not a direct image link, its preview page is
    scraped; on failure, recurses to the next candidate (start + 2).
    """
    sub = 'r/{}/'.format(arg)
    url = get_url(sub)
    print('Acessing {}'.format(url))
    tree = connect(url)
    first_wallpaper = tree.xpath("//div[@class='entry unvoted']/p/a/@href")[start::2]
    if first_wallpaper[0].endswith('.jpg') or first_wallpaper[0].endswith('.png'):
        url = first_wallpaper[0]
        download(url)
    else:
        try:
            url = get_url(first_wallpaper[0])
            tree = connect(url)
            img_url = tree.xpath("//div[@class='media-preview-content']/a/@href")[0]
            download(img_url)
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt); try the next post.
            start += 2
            crawler(arg, start=start)
def main():
    """Entry point: use the --sub argument when given, defaulting to r/wallpapers."""
    # `or` keeps the original truthiness semantics without calling get_arg() twice.
    sub = get_arg() or 'wallpapers'
    crawler(sub)
if __name__ == '__main__':
    main()
import sys
import requests
import argparse
from lxml import etree
# Root of every generated reddit URL.
URL_BASE = 'https://www.reddit.com'


def get_arg():
    """Return the subreddit passed via --sub, or None when absent."""
    parser = argparse.ArgumentParser(description='Change your wallpaper by the last one posted in reddit')
    parser.add_argument('--sub', dest='subreddit', type=str, help='type an subreddit')
    args = parser.parse_args()
    return args.subreddit


def get_url(path):
    """Join *path* onto the reddit base URL.

    The parameter was renamed from `str`, which shadowed the builtin.
    """
    return '{}/{}'.format(URL_BASE, path)


def connect(site):
    """Fetch *site* and return the parsed lxml HTML tree.

    Retries once with certificate verification disabled on SSL errors.
    Raises RuntimeError when the response status is not 200.
    """
    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'}
    try:
        html = requests.get(site, headers=headers)
    except requests.exceptions.SSLError:
        # Narrowed from a bare `except:`; the verify=False retry is only
        # meaningful for certificate problems.
        html = requests.get(site, headers=headers, verify=False)
    # Original used `is not 200` (worked only via CPython int caching) and
    # raised BaseException, which `except Exception` cannot catch.
    if html.status_code != 200:
        raise RuntimeError('Error {}. Invalid Subreddit'.format(html.status_code))
    page = html.text
    tree = etree.HTML(page)
    return tree


def sizeof_fmt(num, suffix='B'):
    """Render *num* bytes as a human-readable string with binary prefixes."""
    for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
        if abs(num) < 1024.0:
            return "%3.3f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.3f%s%s" % (num, 'Yi', suffix)


def download(url):
    """Download *url* into ./wallpaper/ (with a progress bar) and set it as the
    GNOME desktop background. Skips the download when the file already exists.
    """
    name = url.split('/')[-1]
    folder = 'wallpaper'
    img_path = os.path.join(folder, name)
    if not os.path.exists(folder):
        os.makedirs(folder)
    if not os.path.exists(img_path):
        print('Downloading {}'.format(name))
        with open(img_path, 'wb') as f:
            result = requests.get(url, stream=True)
            total_length = result.headers.get('content-length')
            dl = 0
            # content-length may be missing (chunked responses).
            total_length = int(total_length) if total_length else None
            for data in result.iter_content(chunk_size=(1024)):
                dl += len(data)
                f.write(data)
                if total_length:
                    done = int(50 * dl / total_length)
                    sys.stdout.write("\r[%s%s] (%s/%s) " % (
                        '=' * done, ' ' * (50 - done),
                        sizeof_fmt(dl), sizeof_fmt(total_length)))
                    sys.stdout.flush()
                else:
                    sys.stdout.write("\rDownloaded %s so far... " % sizeof_fmt(dl))
        # NOTE(review): path is not shell-quoted — breaks on names with spaces.
        os.system("gsettings set org.gnome.desktop.background picture-uri file:{}".format(os.path.abspath(img_path)))
        # os.system("feh --bg-fill {}".format(os.path.abspath(img_path)))
    else:
        print("Wallpapers is up to date")


def crawler(arg, start=0):
    """Find the newest image post in r/<arg> and download it.

    When the first candidate is not a direct image link, its preview page is
    scraped; on failure, recurses to the next candidate (start + 2).
    """
    sub = 'r/{}/'.format(arg)
    url = get_url(sub)
    print('Acessing {}'.format(url))
    tree = connect(url)
    first_wallpaper = tree.xpath("//div[@class='entry unvoted']/p/a/@href")[start::2]
    if first_wallpaper[0].endswith('.jpg') or first_wallpaper[0].endswith('.png'):
        url = first_wallpaper[0]
        download(url)
    else:
        try:
            url = get_url(first_wallpaper[0])
            tree = connect(url)
            img_url = tree.xpath("//div[@class='media-preview-content']/a/@href")[0]
            download(img_url)
        except Exception:
            # Narrowed from a bare `except:`; try the next post.
            start += 2
            crawler(arg, start=start)


def main():
    """Entry point: use the --sub argument when given, defaulting to r/wallpapers."""
    sub = get_arg() or 'wallpapers'
    crawler(sub)


if __name__ == '__main__':
    main()
import string
import random
import re
class ProjectHelper:
    """Selenium page-object helper for project management in the app under test."""

    def __init__(self, app):
        self.app = app

    # Cached list of project names; reset to None whenever the list may change.
    projects_cache = None

    def open_projects_page(self):
        """Open the project management page unless it is already shown."""
        wd = self.app.wd
        if not wd.current_url.endswith("/manage_proj_page.php"):
            # The number of elements in the menu depends on the existence of
            # projects, therefore the try-except is used.
            try:
                wd.find_element_by_xpath("//div[@class='nav-wrap']/ul/li[7]/a/i").click()
            except Exception:
                wd.find_element_by_xpath("//div[@class='nav-wrap']/ul/li[6]/a/i").click()
            wd.find_element_by_xpath("//div[2]/div[2]/div[2]/div/ul/li[3]/a").click()

    def add_project(self, project_name):
        """Create a project named *project_name* and invalidate the cache."""
        wd = self.app.wd
        self.open_projects_page()
        wd.find_element_by_css_selector('input.btn').click()
        wd.find_element_by_id('project-name').click()
        wd.find_element_by_id('project-name').clear()
        wd.find_element_by_id('project-name').send_keys(project_name)
        wd.find_element_by_css_selector('input.btn').click()
        wd.find_element_by_css_selector('a.btn').click()
        self.projects_cache = None

    def delete_project(self, project_name):
        """Delete the project named *project_name* and invalidate the cache."""
        wd = self.app.wd
        self.open_projects_page()
        wd.find_element_by_link_text(project_name).click()
        wd.find_element_by_xpath("//form[@id='project-delete-form']/fieldset/input[3]").click()
        wd.find_element_by_css_selector('input.btn').click()
        self.projects_cache = None

    def get_projects_list(self):
        """Return (and lazily cache) the list of project names on the page."""
        if self.projects_cache is None:
            wd = self.app.wd
            self.open_projects_page()
            self.projects_cache = []
            for element in wd.find_elements_by_xpath("//tbody/tr/td/a"):
                text = element.text
                self.projects_cache.append(text)
        return self.projects_cache

    def random_name(self):
        """Return a random name of letters/digits/spaces, collapsed and right-stripped."""
        sym = string.ascii_letters + string.digits + " " * 10
        # Raw string: '\s' in a plain literal is an invalid escape sequence.
        return re.sub(r'\s+', ' ', ("".join([random.choice(sym) for i in range(random.randint(3, 20))]).rstrip()))
import random
import re
class ProjectHelper:
    """Selenium page-object helper for project management in the app under test."""

    def __init__(self, app):
        self.app = app

    # Cached list of project names; reset to None whenever the list may change.
    projects_cache = None

    def open_projects_page(self):
        """Open the project management page unless it is already shown."""
        wd = self.app.wd
        if not wd.current_url.endswith("/manage_proj_page.php"):
            # The number of elements in the menu depends on the existence of
            # projects, therefore the try-except is used.
            try:
                wd.find_element_by_xpath("//div[@class='nav-wrap']/ul/li[7]/a/i").click()
            except Exception:
                wd.find_element_by_xpath("//div[@class='nav-wrap']/ul/li[6]/a/i").click()
            wd.find_element_by_xpath("//div[2]/div[2]/div[2]/div/ul/li[3]/a").click()

    def add_project(self, project_name):
        """Create a project named *project_name* and invalidate the cache."""
        wd = self.app.wd
        self.open_projects_page()
        wd.find_element_by_css_selector('input.btn').click()
        wd.find_element_by_id('project-name').click()
        wd.find_element_by_id('project-name').clear()
        wd.find_element_by_id('project-name').send_keys(project_name)
        wd.find_element_by_css_selector('input.btn').click()
        wd.find_element_by_css_selector('a.btn').click()
        self.projects_cache = None

    def delete_project(self, project_name):
        """Delete the project named *project_name* and invalidate the cache."""
        wd = self.app.wd
        self.open_projects_page()
        wd.find_element_by_link_text(project_name).click()
        wd.find_element_by_xpath("//form[@id='project-delete-form']/fieldset/input[3]").click()
        wd.find_element_by_css_selector('input.btn').click()
        self.projects_cache = None

    def get_projects_list(self):
        """Return (and lazily cache) the list of project names on the page."""
        if self.projects_cache is None:
            wd = self.app.wd
            self.open_projects_page()
            self.projects_cache = []
            for element in wd.find_elements_by_xpath("//tbody/tr/td/a"):
                text = element.text
                self.projects_cache.append(text)
        return self.projects_cache

    def random_name(self):
        """Return a random name of letters/digits/spaces, collapsed and right-stripped."""
        sym = string.ascii_letters + string.digits + " " * 10
        # Raw string: '\s' in a plain literal is an invalid escape sequence.
        return re.sub(r'\s+', ' ', ("".join([random.choice(sym) for i in range(random.randint(3, 20))]).rstrip()))
import logging
from ..tools import (constants, helpers, HomieDiscoveryBase, STAGE_0, STAGE_1, STAGE_2)
from .homie_node import HomieNode
_LOGGER = logging.getLogger(__name__)
class HomieDevice(HomieDiscoveryBase):
    """A definition of a Homie Device discovered over MQTT."""

    def __init__(self, base_topic: str, device_id: str):
        super().__init__()
        _LOGGER.info(f"Homie Device Discovered. ID: {device_id}")
        self._base_topic = base_topic
        self._device_id = device_id
        self._prefix_topic = f'{base_topic}/{device_id}'
        # Child nodes keyed by node id; filled in during discovery.
        self._homie_nodes = dict()
        # Every attribute starts unknown and is populated by _update().
        self._convention_version = constants.STATE_UNKNOWN
        self._online = constants.STATE_UNKNOWN
        self._name = constants.STATE_UNKNOWN
        self._ip = constants.STATE_UNKNOWN
        self._mac = constants.STATE_UNKNOWN
        self._uptime = constants.STATE_UNKNOWN
        self._signal = constants.STATE_UNKNOWN
        self._stats_interval = constants.STATE_UNKNOWN
        self._fw_name = constants.STATE_UNKNOWN
        self._fw_version = constants.STATE_UNKNOWN
        self._fw_checksum = constants.STATE_UNKNOWN
        self._implementation = constants.STATE_UNKNOWN

    def setup(self, subscribe, publish):
        """
        Setup of the Homie Device.

        This starts the discovery process of nodes. Once discovery of the
        children has completed (device is `STAGE_1`), the device subscribes
        to its whole topic tree and discovers all attributes.
        """
        self._discover_nodes(subscribe, publish)
        self.add_on_discovery_stage_change(lambda _, stage: subscribe(f'{self._prefix_topic}/#', self._update), STAGE_1)

    def _discover_nodes(self, subscribe, publish):
        # Watch $nodes and create a HomieNode for every id not seen before.
        def _on_discovery_nodes(topic: str, payload: str, msg_qos: int):
            for node_id in helpers.proccess_nodes(payload):
                if node_id not in self._homie_nodes:
                    homie_node = HomieNode(self, self._prefix_topic, node_id)
                    homie_node.add_on_discovery_stage_change(self._check_discovery_stage)
                    homie_node.setup(subscribe, publish)
                    self._homie_nodes[node_id] = homie_node
        subscribe(f'{self._prefix_topic}/$nodes', _on_discovery_nodes)

    def _check_discovery_stage(self, homie_node=None, stage=None):
        current_stage = self._stage_of_discovery
        if current_stage == STAGE_0:
            if helpers.can_advance_stage(STAGE_1, self._homie_nodes):
                self._set_discovery_stage(STAGE_1)
        if current_stage == STAGE_1:
            # NOTE(review): `is not` assumes STATE_UNKNOWN is a singleton
            # sentinel — confirm; `!=` would be the safer comparison.
            if helpers.can_advance_stage(STAGE_2, self._homie_nodes) and self._online is not constants.STATE_UNKNOWN:
                self._set_discovery_stage(STAGE_2)

    def _update(self, topic: str, payload: str, qos: int):
        if self._prefix_topic not in topic:
            return None
        # Fan the raw message out to every child node first.
        for homie_node in self._homie_nodes.values():
            homie_node._update(topic, payload, qos)
        topic = topic.replace(self._prefix_topic, '')
        # Load Device Properties
        if topic == '/$homie':
            self._convention_version = payload
        if topic == '/$online':
            self._online = payload
        if topic == '/$name':
            self._name = payload
        if topic == '/$localip':
            self._ip = payload
        if topic == '/$mac':
            self._mac = payload
        # Load Device Stats Properties
        if topic == '/$stats/uptime':
            self._uptime = payload
        if topic == '/$stats/signal':
            self._signal = payload
        if topic == '/$stats/interval':
            self._stats_interval = payload
        # Load Firmware Properties
        if topic == '/$fw/name':
            self._fw_name = payload
        if topic == '/$fw/version':
            self._fw_version = payload
        if topic == '/$fw/checksum':
            self._fw_checksum = payload
        # Load Implementation Properties
        if topic == '/$implementation':
            self._implementation = payload
        # Ready: a $online message may advance the discovery stage.
        if topic == '/$online':
            self._check_discovery_stage()

    @property
    def base_topic(self):
        """Return the Base Topic of the device."""
        return self._base_topic

    @property
    def device_id(self):
        """Return the Device ID of the device."""
        return self._device_id

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def homie_version(self):
        """Return the Homie Framework Version of the device."""
        return self._convention_version

    @property
    def online(self) -> bool:
        """Return true if the device is online."""
        return helpers.string_to_bool(self._online)

    @property
    def ip(self):
        """Return the IP of the device."""
        return self._ip

    @property
    def mac(self):
        """Return the MAC of the device."""
        return self._mac

    @property
    def uptime(self):
        """Return the Uptime of the device."""
        return self._uptime

    @property
    def signal(self):
        """Return the Signal of the device."""
        return self._signal

    @property
    def stats_interval(self):
        """Return the Stats Interval of the device."""
        return self._stats_interval

    @property
    def firmware_name(self):
        """Return the Firmware Name of the device."""
        return self._fw_name

    @property
    def firmware_version(self):
        """Return the Firmware Version of the device."""
        return self._fw_version

    @property
    def firmware_checksum(self):
        """Return the Firmware Checksum of the device."""
        return self._fw_checksum

    @property
    def is_setup(self):
        """Return True if the Device has been setup as a component."""
        return self.stage_of_discovery >= STAGE_2

    @property
    def nodes(self):
        """Return a List of Nodes for the device."""
        return self._homie_nodes.values()

    def get_node(self, node_id):
        """Return a specific Node for the device."""
        return self._homie_nodes[node_id]

    def has_node(self, node_id: str):
        """Return True if specific Node for the Device exists."""
        return node_id in self._homie_nodes

    @property
    def entity_id(self):
        """Return the ID of the entity."""
        return self.device_id
import logging
from ..tools import (constants, helpers, HomieDiscoveryBase, STAGE_0, STAGE_1, STAGE_2)
from .homie_node import HomieNode
_LOGGER = logging.getLogger(__name__)
class HomieDevice(HomieDiscoveryBase):
    """A definition of a Homie Device discovered over MQTT."""

    def __init__(self, base_topic: str, device_id: str):
        super().__init__()
        _LOGGER.info(f"Homie Device Discovered. ID: {device_id}")
        self._base_topic = base_topic
        self._device_id = device_id
        self._prefix_topic = f'{base_topic}/{device_id}'
        # Child nodes keyed by node id; filled in during discovery.
        self._homie_nodes = dict()
        # Every attribute starts unknown and is populated by _update().
        self._convention_version = constants.STATE_UNKNOWN
        self._online = constants.STATE_UNKNOWN
        self._name = constants.STATE_UNKNOWN
        self._ip = constants.STATE_UNKNOWN
        self._mac = constants.STATE_UNKNOWN
        self._uptime = constants.STATE_UNKNOWN
        self._signal = constants.STATE_UNKNOWN
        self._stats_interval = constants.STATE_UNKNOWN
        self._fw_name = constants.STATE_UNKNOWN
        self._fw_version = constants.STATE_UNKNOWN
        self._fw_checksum = constants.STATE_UNKNOWN
        self._implementation = constants.STATE_UNKNOWN

    def setup(self, subscribe, publish):
        """
        Setup of the Homie Device.

        This starts the discovery process of nodes. Once discovery of the
        children has completed (device is `STAGE_1`), the device subscribes
        to its whole topic tree and discovers all attributes.
        """
        self._discover_nodes(subscribe, publish)
        self.add_on_discovery_stage_change(lambda _, stage: subscribe(f'{self._prefix_topic}/#', self._update), STAGE_1)

    def _discover_nodes(self, subscribe, publish):
        # Watch $nodes and create a HomieNode for every id not seen before.
        def _on_discovery_nodes(topic: str, payload: str, msg_qos: int):
            for node_id in helpers.proccess_nodes(payload):
                if node_id not in self._homie_nodes:
                    homie_node = HomieNode(self, self._prefix_topic, node_id)
                    homie_node.add_on_discovery_stage_change(self._check_discovery_stage)
                    homie_node.setup(subscribe, publish)
                    self._homie_nodes[node_id] = homie_node
        subscribe(f'{self._prefix_topic}/$nodes', _on_discovery_nodes)

    def _check_discovery_stage(self, homie_node=None, stage=None):
        current_stage = self._stage_of_discovery
        if current_stage == STAGE_0:
            if helpers.can_advance_stage(STAGE_1, self._homie_nodes):
                self._set_discovery_stage(STAGE_1)
        if current_stage == STAGE_1:
            # NOTE(review): `is not` assumes STATE_UNKNOWN is a singleton
            # sentinel — confirm; `!=` would be the safer comparison.
            if helpers.can_advance_stage(STAGE_2, self._homie_nodes) and self._online is not constants.STATE_UNKNOWN:
                self._set_discovery_stage(STAGE_2)

    def _update(self, topic: str, payload: str, qos: int):
        if self._prefix_topic not in topic:
            return None
        # Fan the raw message out to every child node first.
        for homie_node in self._homie_nodes.values():
            homie_node._update(topic, payload, qos)
        topic = topic.replace(self._prefix_topic, '')
        # Load Device Properties
        if topic == '/$homie':
            self._convention_version = payload
        if topic == '/$online':
            self._online = payload
        if topic == '/$name':
            self._name = payload
        if topic == '/$localip':
            self._ip = payload
        if topic == '/$mac':
            self._mac = payload
        # Load Device Stats Properties
        if topic == '/$stats/uptime':
            self._uptime = payload
        if topic == '/$stats/signal':
            self._signal = payload
        if topic == '/$stats/interval':
            self._stats_interval = payload
        # Load Firmware Properties
        if topic == '/$fw/name':
            self._fw_name = payload
        if topic == '/$fw/version':
            self._fw_version = payload
        if topic == '/$fw/checksum':
            self._fw_checksum = payload
        # Load Implementation Properties
        if topic == '/$implementation':
            self._implementation = payload
        # Ready: a $online message may advance the discovery stage.
        if topic == '/$online':
            self._check_discovery_stage()

    @property
    def base_topic(self):
        """Return the Base Topic of the device."""
        return self._base_topic

    @property
    def device_id(self):
        """Return the Device ID of the device."""
        return self._device_id

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def homie_version(self):
        """Return the Homie Framework Version of the device."""
        return self._convention_version

    @property
    def online(self) -> bool:
        """Return true if the device is online."""
        return helpers.string_to_bool(self._online)

    @property
    def ip(self):
        """Return the IP of the device."""
        return self._ip

    @property
    def mac(self):
        """Return the MAC of the device."""
        return self._mac

    @property
    def uptime(self):
        """Return the Uptime of the device."""
        return self._uptime

    @property
    def signal(self):
        """Return the Signal of the device."""
        return self._signal

    @property
    def stats_interval(self):
        """Return the Stats Interval of the device."""
        return self._stats_interval

    @property
    def firmware_name(self):
        """Return the Firmware Name of the device."""
        return self._fw_name

    @property
    def firmware_version(self):
        """Return the Firmware Version of the device."""
        return self._fw_version

    @property
    def firmware_checksum(self):
        """Return the Firmware Checksum of the device."""
        return self._fw_checksum

    @property
    def is_setup(self):
        """Return True if the Device has been setup as a component."""
        return self.stage_of_discovery >= STAGE_2

    @property
    def nodes(self):
        """Return a List of Nodes for the device."""
        return self._homie_nodes.values()

    def get_node(self, node_id):
        """Return a specific Node for the device."""
        return self._homie_nodes[node_id]

    def has_node(self, node_id: str):
        """Return True if specific Node for the Device exists."""
        return node_id in self._homie_nodes

    @property
    def entity_id(self):
        """Return the ID of the entity."""
        return self.device_id
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ImageFolderMemberApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the API wrapper, falling back to the shared default ApiClient."""
    config = Configuration()
    if api_client:
        self.api_client = api_client
    else:
        # Lazily create the global default client the first time it is needed.
        if not config.api_client:
            config.api_client = ApiClient()
        self.api_client = config.api_client
def image_folder_members_change_stream_get(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a `callback` function kwarg to make the
    request asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str options:
    :return: file (or the request thread when called asynchronously)
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths return the delegate's result unchanged;
    # the generated original's `(data) = ...; return data` branch was redundant.
    return self.image_folder_members_change_stream_get_with_http_info(**kwargs)
def image_folder_members_change_stream_get_with_http_info(self, **kwargs):
    """
    Create a change stream (GET), returning full HTTP info.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str options:
    :return: file (or the request thread when called asynchronously)
    """
    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # locals() snapshots the arguments so named params and extra kwargs can
    # be validated and looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_change_stream_get" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    resource_path = '/ImageFolderMembers/change-stream'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'options' in params:
        query_params['options'] = params['options']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_change_stream_post(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a `callback` function kwarg to make the
    request asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str options:
    :return: file (or the request thread when called asynchronously)
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths return the delegate's result unchanged;
    # the generated original's `(data) = ...; return data` branch was redundant.
    return self.image_folder_members_change_stream_post_with_http_info(**kwargs)
def image_folder_members_change_stream_post_with_http_info(self, **kwargs):
    """
    Create a change stream (POST), returning full HTTP info.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str options:
    :return: file (or the request thread when called asynchronously)
    """
    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # locals() snapshots the arguments so named params and extra kwargs can
    # be validated and looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_change_stream_post" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    resource_path = '/ImageFolderMembers/change-stream'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # For the POST variant, `options` travels in the form body, not the query.
    if 'options' in params:
        form_params.append(('options', params['options']))
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_count_get(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.

    Synchronous by default; pass a `callback` function kwarg to make the
    request asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001 (or the request thread when called asynchronously)
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths return the delegate's result unchanged;
    # the generated original's `(data) = ...; return data` branch was redundant.
    return self.image_folder_members_count_get_with_http_info(**kwargs)
def image_folder_members_count_get_with_http_info(self, **kwargs):
    """
    Count instances of the model matched by where, returning full HTTP info.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001 (or the request thread when called asynchronously)
    """
    all_params = ['where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # locals() snapshots the arguments so named params and extra kwargs can
    # be validated and looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_count_get" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    resource_path = '/ImageFolderMembers/count'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'where' in params:
        query_params['where'] = params['where']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_find_one_get(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.

    Synchronous by default; pass a `callback` function kwarg to make the
    request asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ImageFolderMember (or the request thread when called asynchronously)
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths return the delegate's result unchanged;
    # the generated original's `(data) = ...; return data` branch was redundant.
    return self.image_folder_members_find_one_get_with_http_info(**kwargs)
def image_folder_members_find_one_get_with_http_info(self, **kwargs):
    """
    Find first instance of the model matched by filter, returning full HTTP info.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ImageFolderMember (or the request thread when called asynchronously)
    """
    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # locals() snapshots the arguments so named params and extra kwargs can
    # be validated and looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_find_one_get" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    resource_path = '/ImageFolderMembers/findOne'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_get(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.

    Synchronous by default; pass a `callback` function kwarg to make the
    request asynchronously.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[ImageFolderMember] (or the request thread when called asynchronously)
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths return the delegate's result unchanged;
    # the generated original's `(data) = ...; return data` branch was redundant.
    return self.image_folder_members_get_with_http_info(**kwargs)
def image_folder_members_get_with_http_info(self, **kwargs):
"""
Find all instances of the model matched by filter from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_get_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
:return: list[ImageFolderMember]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/ImageFolderMembers'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ImageFolderMember]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_delete(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_delete(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_delete_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_delete_with_http_info(id, **kwargs)
return data
def image_folder_members_id_delete_with_http_info(self, id, **kwargs):
"""
Delete a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_delete_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_delete`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_exists_get(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_exists_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_exists_get_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_exists_get_with_http_info(id, **kwargs)
return data
def image_folder_members_id_exists_get_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_exists_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_exists_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_exists_get`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}/exists'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_folder_get(self, id, **kwargs):
"""
Fetches belongsTo relation folder.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_folder_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ImageFolderMember id (required)
:param bool refresh:
:return: ImageFolder
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_folder_get_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_folder_get_with_http_info(id, **kwargs)
return data
def image_folder_members_id_folder_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation folder.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_folder_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ImageFolderMember id (required)
:param bool refresh:
:return: ImageFolder
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_folder_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_folder_get`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}/folder'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImageFolder',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_get(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_get_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_get_with_http_info(id, **kwargs)
return data
def image_folder_members_id_get_with_http_info(self, id, **kwargs):
"""
Find a model instance by {{id}} from the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'filter']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_get`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'filter' in params:
query_params['filter'] = params['filter']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImageFolderMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_head(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_head(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_head_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_head_with_http_info(id, **kwargs)
return data
def image_folder_members_id_head_with_http_info(self, id, **kwargs):
"""
Check whether a model instance exists in the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_head_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:return: InlineResponse2002
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_head" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_head`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'HEAD',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse2002',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_member_get(self, id, **kwargs):
"""
Fetches belongsTo relation member.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_member_get(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ImageFolderMember id (required)
:param bool refresh:
:return: TeamMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_member_get_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_member_get_with_http_info(id, **kwargs)
return data
def image_folder_members_id_member_get_with_http_info(self, id, **kwargs):
"""
Fetches belongsTo relation member.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_member_get_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ImageFolderMember id (required)
:param bool refresh:
:return: TeamMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'refresh']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_member_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_member_get`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}/member'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'refresh' in params:
query_params['refresh'] = params['refresh']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TeamMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_patch(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_patch(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ImageFolderMember id (required)
:param ImageFolderMember data: An object of model property name/value pairs
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_patch_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_patch_with_http_info(id, **kwargs)
return data
def image_folder_members_id_patch_with_http_info(self, id, **kwargs):
"""
Patch attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_patch_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: ImageFolderMember id (required)
:param ImageFolderMember data: An object of model property name/value pairs
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_patch`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImageFolderMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_put(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_put(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ImageFolderMember data: Model instance data
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_put_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_put_with_http_info(id, **kwargs)
return data
def image_folder_members_id_put_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_put_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ImageFolderMember data: Model instance data
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_put`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImageFolderMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_id_replace_post(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_replace_post(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ImageFolderMember data: Model instance data
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.image_folder_members_id_replace_post_with_http_info(id, **kwargs)
else:
(data) = self.image_folder_members_id_replace_post_with_http_info(id, **kwargs)
return data
def image_folder_members_id_replace_post_with_http_info(self, id, **kwargs):
"""
Replace attributes for a model instance and persist it into the data source.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.image_folder_members_id_replace_post_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: Model id (required)
:param ImageFolderMember data: Model instance data
:return: ImageFolderMember
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'data']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method image_folder_members_id_replace_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_replace_post`")
collection_formats = {}
resource_path = '/ImageFolderMembers/{id}/replace'.replace('{format}', 'json')
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in params:
body_params = params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
# Authentication setting
auth_settings = ['access_token']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImageFolderMember',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
collection_formats=collection_formats)
def image_folder_members_post(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ImageFolderMember data: Model instance data
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_post_with_http_info(**kwargs)
def image_folder_members_post_with_http_info(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_post_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ImageFolderMember data: Model instance data
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist, then the validated kwargs are folded in, so the
    # "'x' in params" checks below see only caller-supplied arguments
    # (plus 'self' and 'all_params').
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; this path has no
    # '{format}' placeholder, so replace() is a no-op here.
    resource_path = '/ImageFolderMembers'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # The model instance, when supplied, is sent as the request body.
    body_params = None
    if 'data' in params:
        body_params = params['data']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ImageFolderMemberApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """
    Build the API proxy, wiring in an `ApiClient`.

    :param api_client: explicit client to use; when omitted, the shared
        client held by the global `Configuration` is used (and created
        there first if it does not exist yet).
    """
    # Configuration() is instantiated unconditionally, matching the
    # generated code's behaviour even when an explicit client is supplied.
    config = Configuration()
    if api_client:
        self.api_client = api_client
        return
    if not config.api_client:
        config.api_client = ApiClient()
    self.api_client = config.api_client
def image_folder_members_change_stream_get(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_change_stream_get_with_http_info(**kwargs)
def image_folder_members_change_stream_get_with_http_info(self, **kwargs):
    """
    Create a change stream.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_change_stream_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist, then the validated kwargs are folded in, so the
    # "'options' in params" check below sees only caller-supplied arguments.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_change_stream_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/change-stream'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    if 'options' in params:
        query_params['options'] = params['options']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_change_stream_post(self, **kwargs):
    """
    Create a change stream.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_change_stream_post_with_http_info(**kwargs)
def image_folder_members_change_stream_post_with_http_info(self, **kwargs):
    """
    Create a change stream.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_change_stream_post_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str options:
    :return: file
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['options']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist, then the validated kwargs are folded in, so the
    # "'options' in params" check below sees only caller-supplied arguments.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_change_stream_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/change-stream'.replace('{format}', 'json')
    path_params = {}

    query_params = {}

    header_params = {}
    form_params = []
    local_var_files = {}
    # Unlike the GET variant, 'options' travels as a form field on POST.
    if 'options' in params:
        form_params.append(('options', params['options']))
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='file',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_count_get(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_count_get_with_http_info(**kwargs)
def image_folder_members_count_get_with_http_info(self, **kwargs):
    """
    Count instances of the model matched by where from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_count_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str where: Criteria to match model instances
    :return: InlineResponse2001
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['where']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist, then the validated kwargs are folded in, so the
    # "'where' in params" check below sees only caller-supplied arguments.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_count_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/count'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    if 'where' in params:
        query_params['where'] = params['where']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2001',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_find_one_get(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_find_one_get_with_http_info(**kwargs)
def image_folder_members_find_one_get_with_http_info(self, **kwargs):
    """
    Find first instance of the model matched by filter from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_find_one_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist, then the validated kwargs are folded in, so the
    # "'filter' in params" check below sees only caller-supplied arguments.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_find_one_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/findOne'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_get(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[ImageFolderMember]
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_get_with_http_info(**kwargs)
def image_folder_members_get_with_http_info(self, **kwargs):
    """
    Find all instances of the model matched by filter from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_get_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str filter: Filter defining fields, where, include, order, offset, and limit - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: list[ImageFolderMember]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist, then the validated kwargs are folded in, so the
    # "'filter' in params" check below sees only caller-supplied arguments.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers'.replace('{format}', 'json')
    path_params = {}

    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[ImageFolderMember]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_delete(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_id_delete_with_http_info(id, **kwargs)
def image_folder_members_id_delete_with_http_info(self, id, **kwargs):
    """
    Delete a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_delete_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist (it captures 'id' here), then the validated kwargs are
    # folded in, so the membership checks below behave as intended.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_delete`")

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='object',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_exists_get(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_id_exists_get_with_http_info(id, **kwargs)
def image_folder_members_id_exists_get_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_exists_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist (it captures 'id' here), then the validated kwargs are
    # folded in, so the membership checks below behave as intended.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_exists_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_exists_get`")

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/{id}/exists'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_folder_get(self, id, **kwargs):
    """
    Fetches belongsTo relation folder.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ImageFolderMember id (required)
    :param bool refresh:
    :return: ImageFolder
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_id_folder_get_with_http_info(id, **kwargs)
def image_folder_members_id_folder_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation folder.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_folder_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ImageFolderMember id (required)
    :param bool refresh:
    :return: ImageFolder
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist (it captures 'id' here), then the validated kwargs are
    # folded in, so the membership checks below behave as intended.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_folder_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_folder_get`")

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/{id}/folder'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolder',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_get(self, id, **kwargs):
    """
    Find a model instance by {{id}} from the data source.

    Synchronous by default; pass a `callback` callable to receive the
    response asynchronously, in which case the request thread is returned
    instead of the response data.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # The *_with_http_info variant already returns the response data when no
    # callback is given and the request thread when one is, so a single
    # delegating call covers both of the original sync/async branches.
    kwargs['_return_http_data_only'] = True
    return self.image_folder_members_id_get_with_http_info(id, **kwargs)
def image_folder_members_id_get_with_http_info(self, id, **kwargs):
    """
    Find a model instance by {{id}} from the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param str filter: Filter defining fields and include - must be a JSON-encoded string ({\"something\":\"value\"})
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments accepted by this endpoint; 'callback' and
    # '_return_http_data_only' are accepted by every generated method.
    all_params = ['id', 'filter']
    all_params.append('callback')
    all_params.append('_return_http_data_only')

    # NOTE: statement order matters — locals() is snapshotted before any other
    # locals exist (it captures 'id' here), then the validated kwargs are
    # folded in, so the membership checks below behave as intended.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_get`")

    collection_formats = {}

    # '{format}' substitution is a codegen artifact; no-op on this path.
    resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']

    query_params = {}
    if 'filter' in params:
        query_params['filter'] = params['filter']

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        # Omit the header entirely rather than send an empty Accept value.
        del header_params['Accept']

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])

    # Authentication setting
    auth_settings = ['access_token']

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_head(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback is invoked with the response.

    :param str id: Model id (required)
    :param callback function: The callback function for asynchronous request. (optional)
    :return: InlineResponse2002, or the request thread when asynchronous
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('callback'):
        # Synchronous path: block and hand back the deserialized result.
        return self.image_folder_members_id_head_with_http_info(id, **kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return self.image_folder_members_id_head_with_http_info(id, **kwargs)
def image_folder_members_id_head_with_http_info(self, id, **kwargs):
    """
    Check whether a model instance exists in the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_head_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :return: InlineResponse2002
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is a caller typo.
    all_params = ['id']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot locals (self, id, kwargs, all_params) and flatten the
    # validated kwargs into the same dict so positional and keyword
    # arguments can be looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_head`")
    collection_formats = {}
    # {format} placeholder is fixed to JSON by this generated client.
    resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'HEAD',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='InlineResponse2002',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_member_get(self, id, **kwargs):
    """
    Fetches belongsTo relation member.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback is invoked with the response.

    :param str id: ImageFolderMember id (required)
    :param bool refresh:
    :param callback function: The callback function for asynchronous request. (optional)
    :return: TeamMember, or the request thread when asynchronous
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('callback'):
        # Synchronous path: block and hand back the deserialized result.
        return self.image_folder_members_id_member_get_with_http_info(id, **kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return self.image_folder_members_id_member_get_with_http_info(id, **kwargs)
def image_folder_members_id_member_get_with_http_info(self, id, **kwargs):
    """
    Fetches belongsTo relation member.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_member_get_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ImageFolderMember id (required)
    :param bool refresh:
    :return: TeamMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is a caller typo.
    all_params = ['id', 'refresh']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot locals and flatten the validated kwargs into the same dict
    # so positional and keyword arguments are looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_member_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_member_get`")
    collection_formats = {}
    resource_path = '/ImageFolderMembers/{id}/member'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    # Optional cache-busting flag forwarded as a query parameter.
    if 'refresh' in params:
        query_params['refresh'] = params['refresh']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='TeamMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_patch(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback is invoked with the response.

    :param str id: ImageFolderMember id (required)
    :param ImageFolderMember data: An object of model property name/value pairs
    :param callback function: The callback function for asynchronous request. (optional)
    :return: ImageFolderMember, or the request thread when asynchronous
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('callback'):
        # Synchronous path: block and hand back the deserialized result.
        return self.image_folder_members_id_patch_with_http_info(id, **kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return self.image_folder_members_id_patch_with_http_info(id, **kwargs)
def image_folder_members_id_patch_with_http_info(self, id, **kwargs):
    """
    Patch attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_patch_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: ImageFolderMember id (required)
    :param ImageFolderMember data: An object of model property name/value pairs
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is a caller typo.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot locals and flatten the validated kwargs into the same dict
    # so positional and keyword arguments are looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_patch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_patch`")
    collection_formats = {}
    resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional model payload becomes the request body.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_put(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback is invoked with the response.

    :param str id: Model id (required)
    :param ImageFolderMember data: Model instance data
    :param callback function: The callback function for asynchronous request. (optional)
    :return: ImageFolderMember, or the request thread when asynchronous
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('callback'):
        # Synchronous path: block and hand back the deserialized result.
        return self.image_folder_members_id_put_with_http_info(id, **kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return self.image_folder_members_id_put_with_http_info(id, **kwargs)
def image_folder_members_id_put_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_put_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param ImageFolderMember data: Model instance data
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is a caller typo.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot locals and flatten the validated kwargs into the same dict
    # so positional and keyword arguments are looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_put" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_put`")
    collection_formats = {}
    resource_path = '/ImageFolderMembers/{id}'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional model payload becomes the request body.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'PUT',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_id_replace_post(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback is invoked with the response.

    :param str id: Model id (required)
    :param ImageFolderMember data: Model instance data
    :param callback function: The callback function for asynchronous request. (optional)
    :return: ImageFolderMember, or the request thread when asynchronous
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('callback'):
        # Synchronous path: block and hand back the deserialized result.
        return self.image_folder_members_id_replace_post_with_http_info(id, **kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return self.image_folder_members_id_replace_post_with_http_info(id, **kwargs)
def image_folder_members_id_replace_post_with_http_info(self, id, **kwargs):
    """
    Replace attributes for a model instance and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_id_replace_post_with_http_info(id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str id: Model id (required)
    :param ImageFolderMember data: Model instance data
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is a caller typo.
    all_params = ['id', 'data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot locals and flatten the validated kwargs into the same dict
    # so positional and keyword arguments are looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_id_replace_post" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `image_folder_members_id_replace_post`")
    collection_formats = {}
    resource_path = '/ImageFolderMembers/{id}/replace'.replace('{format}', 'json')
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional model payload becomes the request body.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
def image_folder_members_post(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.

    Synchronous by default; pass a `callback` keyword argument to run the
    request asynchronously, in which case the request thread is returned
    and the callback is invoked with the response.

    :param ImageFolderMember data: Model instance data
    :param callback function: The callback function for asynchronous request. (optional)
    :return: ImageFolderMember, or the request thread when asynchronous
    """
    # Always unwrap the (data, status, headers) tuple down to the data.
    kwargs['_return_http_data_only'] = True
    if not kwargs.get('callback'):
        # Synchronous path: block and hand back the deserialized result.
        return self.image_folder_members_post_with_http_info(**kwargs)
    # Asynchronous path: the delegate returns the request thread.
    return self.image_folder_members_post_with_http_info(**kwargs)
def image_folder_members_post_with_http_info(self, **kwargs):
    """
    Create a new instance of the model and persist it into the data source.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.image_folder_members_post_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param ImageFolderMember data: Model instance data
    :return: ImageFolderMember
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts; anything else is a caller typo.
    all_params = ['data']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    # Snapshot locals and flatten the validated kwargs into the same dict
    # so all arguments can be looked up uniformly below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method image_folder_members_post" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    resource_path = '/ImageFolderMembers'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Optional model payload becomes the request body.
    if 'data' in params:
        body_params = params['data']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json', 'application/xml', 'text/xml', 'application/javascript', 'text/javascript'])
    if not header_params['Accept']:
        del header_params['Accept']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json', 'application/x-www-form-urlencoded', 'application/xml', 'text/xml'])
    # Authentication setting
    auth_settings = ['access_token']
    # Fix: removed non-Python residue that had been fused onto the final
    # line of this method and broke the file's syntax.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='ImageFolderMember',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    collection_formats=collection_formats)
import os
import random
import string
import shutil
import argparse
class RandFS:
    """Pseudo-random file-tree generator.

    Trees are reproducible: the constructor seeds `random` with a string
    derived from the four limits (or with a previously printed seed), so
    the same seed string always yields the same tree layout.
    """

    def __init__(self, seed=None, max_dirs=1, max_depth=1, max_files=1,
                 max_size=1):
        # Alphabet used for random file/folder names.
        self._rand_set = string.ascii_uppercase + \
                         string.digits + \
                         string.ascii_lowercase
        # Path-component count of the cwd at construction time; baseline
        # for the max-depth check.  NOTE(review): this ties the instance
        # to the cwd it was created in -- confirm callers rely on that.
        self._top = len(os.getcwd().split(os.sep))
        if not seed:
            self._max_dirs, self._max_depth, self._max_files, self._max_size = max_dirs, max_depth, max_files, max_size
            # Seed string just encodes the four limits, e.g. "5.3.2.2".
            self.seed = '.'.join(map(str, [self._max_dirs, self._max_depth, self._max_files, self._max_size]))
        else:
            self._max_dirs, self._max_depth, self._max_files, self._max_size = tuple(map(int, seed.split('.')))
            self.seed = seed
        random.seed(self.seed)

    def _get_random_name(self):
        # 10-character random name drawn from the alphabet above.
        return ''.join(
            random.choice(self._rand_set) for _ in range(10))

    def _is_max_depth_reached(self):
        # Depth is measured as path components below the construction cwd.
        depth = len(os.getcwd().split(os.sep))
        return depth - self._top > self._max_depth

    def _generate_folders(self):
        # Create folders in the cwd and return their absolute paths.
        # NOTE(review): randrange excludes the upper bound, so at most
        # max_dirs - 1 folders are created (zero when max_dirs == 1);
        # changing this would break reproducibility of existing seeds.
        path = os.getcwd()
        dir_list = []
        num_dirs = random.randrange(self._max_dirs)
        for _ in range(num_dirs):
            folder_name = self._get_random_name()
            if not os.path.isdir(folder_name):
                os.mkdir(folder_name)
                dir_list.append(os.path.join(path, folder_name))
        return dir_list

    def _generate_files(self):
        # Write random-content files into the cwd; sizes and counts use
        # randrange, so both stay strictly below their configured maxima.
        number_files = random.randrange(self._max_files)
        for _ in range(number_files):
            file_name = self._get_random_name()
            file_size = random.randrange(self._max_size)
            with open(file_name, 'wb') as f:
                f.write(os.urandom(file_size))

    def create_fs(self, path=None):
        """Recursively populate a tree; returns self from the top-level call.

        The top-level call (path=None) recreates a fresh ./test directory
        and descends into it; recursive calls descend into `path`.  The
        process cwd is changed as a side effect and not restored.
        """
        if path is None:
            if os.path.isdir('test'):
                shutil.rmtree('test')
            os.mkdir('test')
            os.chdir(os.path.join(os.getcwd(), 'test'))
        else:
            os.chdir(path)
        self._generate_files()
        if self._is_max_depth_reached():
            # NOTE(review): returns None here; only visible to callers of
            # the top-level call when _max_depth < 1 -- confirm.
            return
        for folder in self._generate_folders():
            self.create_fs(folder)
        return self
def main(args=None):
    """
    Parse command-line options, generate a pseudo-random file tree under
    ./test, and print the seed string that reproduces it.

    :param args: argument list to parse (defaults to sys.argv[1:])
    :return: 0 if everything is OK
             2 if invalid usage (argparse exits the process itself)
    """
    parser = argparse.ArgumentParser(
        description='Generate pseudo random file tree.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--max-files', default=2, type=int, help='Max amount of files in each folder')
    parser.add_argument('--max-depth', default=3, type=int, help='Max depth of a tree')
    parser.add_argument('--max-size', default=2, type=int, help='Max size of a file')
    parser.add_argument('--max-dirs', default=5, type=int, help='Max amount of folders in each folder')
    parser.add_argument('--hash-str', default='', type=str, help='String will be used for reproducing a '
                                                                 'previously created file tree')
    opts = parser.parse_args(args)
    fs = RandFS(max_depth=opts.max_depth, max_files=opts.max_files, max_dirs=opts.max_dirs, max_size=opts.max_size,
                seed=opts.hash_str)
    print(fs.create_fs().seed)
    # Bug fix: the docstring promises an integer status, but the function
    # previously fell off the end and returned None.
    return 0
# Fix: removed dataset-residue text that had been fused onto the guard's
# call line and broke the file's syntax.
if __name__ == '__main__':
    main()
import os
import random
import string
import shutil
import argparse
class RandFS:
    """Pseudo-random file-tree generator.

    Trees are reproducible: the constructor seeds `random` with a string
    derived from the four limits (or with a previously printed seed), so
    the same seed string always yields the same tree layout.
    """

    def __init__(self, seed=None, max_dirs=1, max_depth=1, max_files=1,
                 max_size=1):
        # Alphabet used for random file/folder names.
        self._rand_set = string.ascii_uppercase + \
                         string.digits + \
                         string.ascii_lowercase
        # Path-component count of the cwd at construction time; baseline
        # for the max-depth check.  NOTE(review): this ties the instance
        # to the cwd it was created in -- confirm callers rely on that.
        self._top = len(os.getcwd().split(os.sep))
        if not seed:
            self._max_dirs, self._max_depth, self._max_files, self._max_size = max_dirs, max_depth, max_files, max_size
            # Seed string just encodes the four limits, e.g. "5.3.2.2".
            self.seed = '.'.join(map(str, [self._max_dirs, self._max_depth, self._max_files, self._max_size]))
        else:
            self._max_dirs, self._max_depth, self._max_files, self._max_size = tuple(map(int, seed.split('.')))
            self.seed = seed
        random.seed(self.seed)

    def _get_random_name(self):
        # 10-character random name drawn from the alphabet above.
        return ''.join(
            random.choice(self._rand_set) for _ in range(10))

    def _is_max_depth_reached(self):
        # Depth is measured as path components below the construction cwd.
        depth = len(os.getcwd().split(os.sep))
        return depth - self._top > self._max_depth

    def _generate_folders(self):
        # Create folders in the cwd and return their absolute paths.
        # NOTE(review): randrange excludes the upper bound, so at most
        # max_dirs - 1 folders are created (zero when max_dirs == 1);
        # changing this would break reproducibility of existing seeds.
        path = os.getcwd()
        dir_list = []
        num_dirs = random.randrange(self._max_dirs)
        for _ in range(num_dirs):
            folder_name = self._get_random_name()
            if not os.path.isdir(folder_name):
                os.mkdir(folder_name)
                dir_list.append(os.path.join(path, folder_name))
        return dir_list

    def _generate_files(self):
        # Write random-content files into the cwd; sizes and counts use
        # randrange, so both stay strictly below their configured maxima.
        number_files = random.randrange(self._max_files)
        for _ in range(number_files):
            file_name = self._get_random_name()
            file_size = random.randrange(self._max_size)
            with open(file_name, 'wb') as f:
                f.write(os.urandom(file_size))

    def create_fs(self, path=None):
        """Recursively populate a tree; returns self from the top-level call.

        The top-level call (path=None) recreates a fresh ./test directory
        and descends into it; recursive calls descend into `path`.  The
        process cwd is changed as a side effect and not restored.
        """
        if path is None:
            if os.path.isdir('test'):
                shutil.rmtree('test')
            os.mkdir('test')
            os.chdir(os.path.join(os.getcwd(), 'test'))
        else:
            os.chdir(path)
        self._generate_files()
        if self._is_max_depth_reached():
            # NOTE(review): returns None here; only visible to callers of
            # the top-level call when _max_depth < 1 -- confirm.
            return
        for folder in self._generate_folders():
            self.create_fs(folder)
        return self
def main(args=None):
    """
    Parse command-line options, generate a pseudo-random file tree under
    ./test, and print the seed string that reproduces it.

    :param args: argument list to parse (defaults to sys.argv[1:])
    :return: 0 if everything is OK
             2 if invalid usage (argparse exits the process itself)
    """
    parser = argparse.ArgumentParser(
        description='Generate pseudo random file tree.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--max-files', default=2, type=int, help='Max amount of files in each folder')
    parser.add_argument('--max-depth', default=3, type=int, help='Max depth of a tree')
    parser.add_argument('--max-size', default=2, type=int, help='Max size of a file')
    parser.add_argument('--max-dirs', default=5, type=int, help='Max amount of folders in each folder')
    parser.add_argument('--hash-str', default='', type=str, help='String will be used for reproducing a '
                                                                 'previously created file tree')
    opts = parser.parse_args(args)
    fs = RandFS(max_depth=opts.max_depth, max_files=opts.max_files, max_dirs=opts.max_dirs, max_size=opts.max_size,
                seed=opts.hash_str)
    print(fs.create_fs().seed)
    # Bug fix: the docstring promises an integer status, but the function
    # previously fell off the end and returned None.
    return 0
# Fix: removed dataset-residue text that had been fused onto the guard's
# call line and broke the file's syntax.
if __name__ == '__main__':
    main()
import time
from paddlespeech.cli.log import logger
from paddlespeech.server.engine.engine_pool import get_engine_pool
def warm_up(engine_and_type: str, warm_up_time: int=3) -> bool:
    """Run a few throwaway TTS inferences so the first real request is fast.

    :param engine_and_type: engine key, e.g. "tts_python", "tts_online-onnx"
    :param warm_up_time: number of warm-up inferences to run
    :return: True on success (or for non-TTS engines, which need no warm-up),
             False when the warm-up fails or the engine type is unknown
    """
    import sys  # bug fix: sys was never imported, so sys.exit below raised NameError

    engine_pool = get_engine_pool()
    if "tts" in engine_and_type:
        tts_engine = engine_pool['tts']
        flag_online = False
        if tts_engine.lang == 'zh':
            sentence = "您好,欢迎使用语音合成服务。"
        elif tts_engine.lang == 'en':
            sentence = "Hello and welcome to the speech synthesis service."
        else:
            logger.error("tts engine only support lang: zh or en.")
            sys.exit(-1)
        # Pick the connection-handler implementation matching the engine
        # flavor; the two "online" variants stream, so they are timed by
        # first-chunk latency instead of total latency.
        if engine_and_type == "tts_python":
            from paddlespeech.server.engine.tts.python.tts_engine import PaddleTTSConnectionHandler
        elif engine_and_type == "tts_inference":
            from paddlespeech.server.engine.tts.paddleinference.tts_engine import PaddleTTSConnectionHandler
        elif engine_and_type == "tts_online":
            from paddlespeech.server.engine.tts.online.python.tts_engine import PaddleTTSConnectionHandler
            flag_online = True
        elif engine_and_type == "tts_online-onnx":
            from paddlespeech.server.engine.tts.online.onnx.tts_engine import PaddleTTSConnectionHandler
            flag_online = True
        else:
            # Bug fix: previously fell through with PaddleTTSConnectionHandler
            # unbound and raised NameError below; also fixed the "tte" typo.
            logger.error("Please check tts engine type.")
            return False
        try:
            logger.info("Start to warm up tts engine.")
            for i in range(warm_up_time):
                connection_handler = PaddleTTSConnectionHandler(tts_engine)
                if flag_online:
                    # Streaming engines: measure time to the first chunk only.
                    for wav in connection_handler.infer(
                            text=sentence,
                            lang=tts_engine.lang,
                            am=tts_engine.config.am):
                        logger.info(
                            f"The first response time of the {i} warm up: {connection_handler.first_response_time} s"
                        )
                        break
                else:
                    st = time.time()
                    connection_handler.infer(text=sentence)
                    et = time.time()
                    logger.info(
                        f"The response time of the {i} warm up: {et - st} s")
        except Exception as e:
            logger.error("Failed to warm up on tts engine.")
            logger.error(e)
            return False
    else:
        # Non-TTS engines currently need no warm-up.
        pass
    return True


import time
from paddlespeech.cli.log import logger
from paddlespeech.server.engine.engine_pool import get_engine_pool
def warm_up(engine_and_type: str, warm_up_time: int=3) -> bool:
    """Run a few throwaway TTS inferences so the first real request is fast.

    :param engine_and_type: engine key, e.g. "tts_python", "tts_online-onnx"
    :param warm_up_time: number of warm-up inferences to run
    :return: True on success (or for non-TTS engines, which need no warm-up),
             False when the warm-up fails or the engine type is unknown
    """
    import sys  # bug fix: sys was never imported, so sys.exit below raised NameError

    engine_pool = get_engine_pool()
    if "tts" in engine_and_type:
        tts_engine = engine_pool['tts']
        flag_online = False
        if tts_engine.lang == 'zh':
            sentence = "您好,欢迎使用语音合成服务。"
        elif tts_engine.lang == 'en':
            sentence = "Hello and welcome to the speech synthesis service."
        else:
            logger.error("tts engine only support lang: zh or en.")
            sys.exit(-1)
        # Pick the connection-handler implementation matching the engine
        # flavor; the two "online" variants stream, so they are timed by
        # first-chunk latency instead of total latency.
        if engine_and_type == "tts_python":
            from paddlespeech.server.engine.tts.python.tts_engine import PaddleTTSConnectionHandler
        elif engine_and_type == "tts_inference":
            from paddlespeech.server.engine.tts.paddleinference.tts_engine import PaddleTTSConnectionHandler
        elif engine_and_type == "tts_online":
            from paddlespeech.server.engine.tts.online.python.tts_engine import PaddleTTSConnectionHandler
            flag_online = True
        elif engine_and_type == "tts_online-onnx":
            from paddlespeech.server.engine.tts.online.onnx.tts_engine import PaddleTTSConnectionHandler
            flag_online = True
        else:
            # Bug fix: previously fell through with PaddleTTSConnectionHandler
            # unbound and raised NameError below; also fixed the "tte" typo.
            logger.error("Please check tts engine type.")
            return False
        try:
            logger.info("Start to warm up tts engine.")
            for i in range(warm_up_time):
                connection_handler = PaddleTTSConnectionHandler(tts_engine)
                if flag_online:
                    # Streaming engines: measure time to the first chunk only.
                    for wav in connection_handler.infer(
                            text=sentence,
                            lang=tts_engine.lang,
                            am=tts_engine.config.am):
                        logger.info(
                            f"The first response time of the {i} warm up: {connection_handler.first_response_time} s"
                        )
                        break
                else:
                    st = time.time()
                    connection_handler.infer(text=sentence)
                    et = time.time()
                    logger.info(
                        f"The response time of the {i} warm up: {et - st} s")
        except Exception as e:
            logger.error("Failed to warm up on tts engine.")
            logger.error(e)
            return False
    else:
        # Non-TTS engines currently need no warm-up.
        pass
    return True
import unittest
import os
from vmaf import project_path, required
from vmaf.config import VmafConfig
from vmaf.core.asset import Asset
from vmaf.core.quality_runner import VmafossExecQualityRunner
from vmaf.core.result_store import FileSystemResultStore
__copyright__ = "Copyright 2016-2018, Netflix, Inc."
__license__ = "Apache, Version 2.0"
class LibDynRunner(VmafossExecQualityRunner):
    """VMAF quality runner that executes the dynamically linked test binary."""

    TYPE = "TESTLIBDYN"

    def _get_exec(self):
        # Resolve src/libvmaf/testlibdyn relative to the project root and
        # assert it exists before handing the path back to the runner.
        exec_rel_path = os.path.join("src", "libvmaf", "testlibdyn")
        return required(project_path(exec_rel_path))
class QualityRunnerTest(unittest.TestCase):
    """End-to-end check of the TESTLIBDYN runner against known feature scores."""

    def setUp(self):
        self.result_store = FileSystemResultStore()

    def tearDown(self):
        # Drop any results the runner persisted during the test.
        if hasattr(self, 'runner'):
            self.runner.remove_results()

    def test_run_testlibdyn_runner(self):
        # Fix: removed non-Python dataset residue that had been fused onto
        # the final assertion line and broke the file's syntax.
        print('test on running TESTLIBDYN runner...')
        ref_path = VmafConfig.test_resource_path("yuv", "src01_hrc00_576x324.yuv")
        dis_path = VmafConfig.test_resource_path("yuv", "src01_hrc01_576x324.yuv")
        # Distorted pair (asset) and reference-vs-itself pair (asset_original).
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      workdir_root=VmafConfig.workdir_path(),
                      ref_path=ref_path,
                      dis_path=dis_path,
                      asset_dict={'width': 576, 'height': 324})
        asset_original = Asset(dataset="test", content_id=0, asset_id=1,
                               workdir_root=VmafConfig.workdir_path(),
                               ref_path=ref_path,
                               dis_path=ref_path,
                               asset_dict={'width': 576, 'height': 324})
        self.runner = LibDynRunner(
            [asset, asset_original],
            None, fifo_mode=True,
            delete_workdir=True,
            result_store=None,
        )
        self.runner.run()
        results = self.runner.results
        # Distorted clip: expected per-feature scores.
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale0_score'], 0.363420458333, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale1_score'], 0.766647520833, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale2_score'], 0.862854708333, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale3_score'], 0.915971791667, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_motion2_score'], 3.8953518541666665, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_adm2_score'], 0.93458777083333333, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_psnr_score'], 30.7550666667, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_ssim_score'], 0.86322654166666657, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_ms_ssim_score'], 0.9632498125, places=4)
        # Reference vs itself: fidelity features should be (near) perfect.
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale0_score'], 1.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale1_score'], 0.999999958333, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale2_score'], 0.999999416667, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale3_score'], 0.999999208333, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_motion2_score'], 3.8953518541666665, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_adm2_score'], 1.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_psnr_score'], 60.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_ssim_score'], 1.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_ms_ssim_score'], 1.0, places=4)
        # Aggregated VMAF predictions.
        self.assertAlmostEqual(results[0]['TESTLIBDYN_score'], 76.699271272486044, places=3)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_score'], 99.946416604585025, places=4)


import unittest
import os
from vmaf import project_path, required
from vmaf.config import VmafConfig
from vmaf.core.asset import Asset
from vmaf.core.quality_runner import VmafossExecQualityRunner
from vmaf.core.result_store import FileSystemResultStore
__copyright__ = "Copyright 2016-2018, Netflix, Inc."
__license__ = "Apache, Version 2.0"
class LibDynRunner(VmafossExecQualityRunner):
    """Quality runner that drives the dynamically linked test build of libvmaf."""

    TYPE = "TESTLIBDYN"

    def _get_exec(self):
        # Resolve the testlibdyn executable relative to the project root.
        exec_rel_path = os.path.join("src", "libvmaf", "testlibdyn")
        return required(project_path(exec_rel_path))
class QualityRunnerTest(unittest.TestCase):
    """End-to-end check of the TESTLIBDYN runner against known reference scores."""

    def tearDown(self):
        # The runner is only created inside the test; guard for early failures.
        if hasattr(self, 'runner'):
            self.runner.remove_results()
        pass

    def setUp(self):
        self.result_store = FileSystemResultStore()

    def test_run_testlibdyn_runner(self):
        """Run both a distorted and an identity pair and pin all feature scores."""
        print('test on running TESTLIBDYN runner...')
        ref_path = VmafConfig.test_resource_path("yuv", "src01_hrc00_576x324.yuv")
        dis_path = VmafConfig.test_resource_path("yuv", "src01_hrc01_576x324.yuv")
        # Distorted asset: reference vs. encoded variant.
        asset = Asset(dataset="test", content_id=0, asset_id=0,
                      workdir_root=VmafConfig.workdir_path(),
                      ref_path=ref_path,
                      dis_path=dis_path,
                      asset_dict={'width':576, 'height':324})
        # Identity asset: reference compared against itself, so most
        # features should saturate at their maximum.
        asset_original = Asset(dataset="test", content_id=0, asset_id=1,
                               workdir_root=VmafConfig.workdir_path(),
                               ref_path=ref_path,
                               dis_path=ref_path,
                               asset_dict={'width':576, 'height':324})
        self.runner = LibDynRunner(
            [asset, asset_original],
            None, fifo_mode=True,
            delete_workdir=True,
            result_store=None,
        )
        self.runner.run()
        results = self.runner.results
        # Expected values are golden scores for this clip pair.
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale0_score'],0.363420458333, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale1_score'], 0.766647520833, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale2_score'], 0.862854708333, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_vif_scale3_score'], 0.915971791667, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_motion2_score'], 3.8953518541666665, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_adm2_score'], 0.93458777083333333, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_psnr_score'], 30.7550666667, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_ssim_score'], 0.86322654166666657, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_ms_ssim_score'], 0.9632498125, places=4)
        # Identity pair: quality features at their ceilings; motion matches
        # the reference clip, psnr clamped at 60.
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale0_score'], 1.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale1_score'],0.999999958333, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale2_score'],0.999999416667, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_vif_scale3_score'], 0.999999208333, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_motion2_score'], 3.8953518541666665, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_adm2_score'], 1.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_psnr_score'], 60.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_ssim_score'], 1.0, places=4)
        self.assertAlmostEqual(results[1]['TESTLIBDYN_ms_ssim_score'], 1.0, places=4)
        self.assertAlmostEqual(results[0]['TESTLIBDYN_score'], 76.699271272486044, places=3)
self.assertAlmostEqual(results[1]['TESTLIBDYN_score'],99.946416604585025, places=4) | 0.443841 | 0.316937 |
from fastapi.middleware.cors import CORSMiddleware
import os, logging
from sys import stderr
LOG_FORMAT = "[%(asctime)s] %(levelname)-8s %(name)-20s %(message)s"
def init_api(api, log):
    """
    Initializes the FastAPI object.

    Middleware and logging, basically: installs CORS middleware, then on
    startup wires a shared console + file logging setup into the "app"
    logger, the provided *log* logger, and uvicorn's logger.
    """
    # Browser origins allowed to call the API during local development.
    origins = [
        "http://localhost",
        "http://localhost:3000",
        "http://localhost:8080",
    ]
    # Add CORS middleware
    api.add_middleware(
        CORSMiddleware,
        allow_origins=origins,
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    ## Configure the logging for the app
    @api.on_event("startup")
    async def startup_event():
        """Install the standardized log handlers when the app starts."""
        # Additional handlers for logging to file / log mgmt solution
        plain_formatter = logging.Formatter(LOG_FORMAT)
        # FIXME find a working(!) ANSI code console formatter (colorlog didn't work in the VS Code terminal)
        console_formatter = logging.Formatter(LOG_FORMAT)
        # Apply to root logger
        app_root_logger = logging.getLogger("app")
        # NOTE(review): os.getenv returns a *string* when LOGLEVEL is set;
        # logging accepts level names like "DEBUG" but will raise on other
        # text — confirm deployment sets a valid level name.
        app_root_logger.setLevel(os.getenv("LOGLEVEL", logging.INFO))
        uvicorn_log = logging.getLogger("uvicorn")
        # Create new handlers and set our standardized formatter
        # TODO: use log forwarding to a centralized log mgmt solution / syslog
        logfile_handler = logging.FileHandler("./.server.log")
        logfile_handler.setFormatter(plain_formatter)
        console_handler = logging.StreamHandler(stream=stderr)
        console_handler.setFormatter(console_formatter)
        # App level log messages should go to stdout/stderr too
        app_root_logger.addHandler(console_handler)
        app_root_logger.addHandler(logfile_handler)
        log.addHandler(console_handler)
        log.addHandler(logfile_handler)
        uvicorn_log.addHandler(console_handler)
        uvicorn_log.addHandler(logfile_handler)
        # We're done here...
        log.info(f"Started {api.title} , version={api.version}")
    @api.on_event("shutdown")
    async def shutdown_event():
log.info(f"Shutting down {api.title}") | app/utils.py | from fastapi.middleware.cors import CORSMiddleware
import os, logging
from sys import stderr
LOG_FORMAT = "[%(asctime)s] %(levelname)-8s %(name)-20s %(message)s"
def init_api(api, log):
"""
Initalizes the FastAPI object.
Middleware and logging, basically
"""
origins = [
"http://localhost",
"http://localhost:3000",
"http://localhost:8080",
]
# Add CORS middleware
api.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
## Configure the logging for the app
@api.on_event("startup")
async def startup_event():
""" """
# Additional handlers for logging to file / log mgmt solution
plain_formatter = logging.Formatter(LOG_FORMAT)
# FIXME find a working(!) ANSI code console formatter (colorlog didnt qwork for me in VS code terminal)
console_formatter = logging.Formatter(LOG_FORMAT)
# Apply to root logger
app_root_logger = logging.getLogger("app")
app_root_logger.setLevel(os.getenv("LOGLEVEL", logging.INFO))
uvicorn_log = logging.getLogger("uvicorn")
# Create new handlers and set our standardized formatter
# TODO: use log forwarding to a centralized log mgmt solution / syslog
logfile_handler = logging.FileHandler("./.server.log")
logfile_handler.setFormatter(plain_formatter)
console_handler = logging.StreamHandler(stream=stderr)
console_handler.setFormatter(console_formatter)
# App level log messages should go to stdout/stderr too
app_root_logger.addHandler(console_handler)
app_root_logger.addHandler(logfile_handler)
log.addHandler(console_handler)
log.addHandler(logfile_handler)
uvicorn_log.addHandler(console_handler)
uvicorn_log.addHandler(logfile_handler)
# We're done here...
log.info(f"Started {api.title} , version={api.version}")
@api.on_event("shutdown")
async def shutdown_event():
log.info(f"Shutting down {api.title}") | 0.291888 | 0.072243 |
from flask import Blueprint, request, send_file, jsonify, json, flash
from flask_jwt_extended import jwt_required, create_access_token, get_jwt_identity, jwt_refresh_token_required, create_refresh_token
from marshmallow import ValidationError
from werkzeug.utils import secure_filename
from app.extensions import db, bcrypt
from .forms import LoginForm
from .models import User, Subject_Subscription, Topic_Subscription
from app.post.models import Subject, Topic, Post
from .schema import user_schema, users_schema
from .schema import subjects_subscription_schema
from .schema import topics_subscription_schema
import os
import datetime
from urllib.parse import urlparse
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
userblueprint = Blueprint('user', __name__)
@userblueprint.route('/v1/users/signup/', methods=('POST', ))
def _register_user():
    """Create a new user account from the posted JSON payload.

    Returns 400 for an empty body or duplicate email, 422 for a payload
    that fails schema validation, otherwise a success message.
    """
    json_data = request.get_json()
    if not json_data:
        return jsonify({'message': 'No input data provided'}), 400
    # Validate the payload shape before touching the database.
    try:
        user_schema.load(json_data)
    except ValidationError as err:
        return jsonify(err.messages), 422
    duplicateuser = User.query.filter_by(
        email=json_data['email'].lower()).first()
    if duplicateuser:
        return jsonify({'message': 'Duplicate user'}), 400
    user = User(username=json_data['username'].lower(),
                email=json_data['email'].lower(),
                firstname=json_data['firstname'].lower(),
                lastname=json_data['lastname'].lower(),
                # Store a bcrypt hash, never the plaintext password.  The
                # original line held a "<PASSWORD>" anonymization placeholder
                # (a syntax error); restored to match check_password_hash
                # used by the login route.
                password=bcrypt.generate_password_hash(json_data['password']),
                school=json_data['school'].lower())
    user.is_teacher = json_data['is_teacher']
    db.session.add(user)
    db.session.commit()
    return jsonify(message="Successful user creation", username=user.username)
@userblueprint.route('/v1/users/delete/', methods=['POST'])
@jwt_required
def _delete_user():
    """Delete the account belonging to the authenticated user."""
    identity = get_jwt_identity()
    if not identity:
        return jsonify(message="Invalid token")
    account = User.query.filter_by(username=identity).first()
    User.query.filter_by(id=account.id).delete()
    db.session.commit()
    return jsonify(message="Successful account deletion"), 200
@userblueprint.route('/users/', methods=('GET', ))
@jwt_required
def _get_user():
    """Return every user in the system; restricted to staff accounts."""
    identity = get_jwt_identity()
    if identity:
        requester = User.query.filter_by(username=identity).first()
        if requester.is_staff:
            # HACK: Super hacky, will be removed when front end is updated
            dump = users_schema.dump(User.query.all(), many=True)
            result = [dump, {}]
            return jsonify({'users': result})
    return jsonify('forbidden'), 403
@userblueprint.route('/v1/users/login/', methods=('POST', ))
def _login_user():
    """Authenticate a user by email/password and issue JWT tokens."""
    form = LoginForm()
    user = User.query.filter_by(email=form.email.data).first()
    if user and bcrypt.check_password_hash(user.password, form.password.data):
        # authenticate user and resave into db
        db.session.add(user)
        db.session.commit()
        flash('Login requested for user {}'.format(user.email))
        expires = datetime.timedelta(days=30)
        access_token = create_access_token(identity=user.username,
                                           expires_delta=expires)
        refresh_token = create_refresh_token(identity=user.username,
                                             expires_delta=expires)
        return jsonify(access_token=access_token,
                       refresh_token=refresh_token), 200
    # NOTE(review): failed logins currently answer 500; 401 would be the
    # conventional status, but clients may depend on this value.
    return json.dumps({'Login': False}), 500, {
        'ContentType': 'application/json'
    }
@userblueprint.route('/v1/users/refresh/', methods=['POST'])
@jwt_refresh_token_required
def refresh():
    """Mint a fresh 30-day access token from a valid refresh token."""
    identity = get_jwt_identity()
    new_token = create_access_token(
        identity=identity,
        expires_delta=datetime.timedelta(days=30))
    return jsonify(access_token=new_token), 200
@userblueprint.route("/v1/users/logout/", methods=["GET"])
@jwt_required
def _logout():
"""Logout the current user."""
#Could use a blacklist to blacklist tokens but for now we'll wait TODO
# user = current_user
# db.session.add(user)
# db.session.commit()
# logout_user()
@userblueprint.route("/v1/users/auth/", methods=["GET"])
@jwt_required
def _auth():
current_user = get_jwt_identity()
user = User.query.filter_by(username=current_user).first()
if current_user:
return jsonify(logged_in_as=current_user,
user_info={
'email': user.email,
'school': user.school,
'firstname': user.firstname,
'lastname': user.lastname,
'is_staff': user.is_staff
}), 200
return jsonify(logged_in_as=''), 200
@userblueprint.route("/v1/users/setUserImage/", methods=["POST"])
@jwt_required
def _set_image():
current_user = get_jwt_identity()
json_data = request.get_json()
if current_user:
user = User.query.filter_by(username=current_user).first()
if user:
if (urlparse(json_data['url']).scheme == 'http'
or urlparse(json_data['url']).scheme == 'https'):
user.profile_image = json_data['url']
user_posts = Post.query.filter_by(author_id=user.id).all()
for user_post in user_posts:
user_post.author_image = json_data['url']
db.session.commit()
return jsonify('successfully changed image'), 200
if (json_data['url'] == ''):
user.profile_image = 'Avatar.svg'
user_posts = Post.query.filter_by(author_id=user.id).all()
for user_post in user_posts:
user_post.author_image = 'Avatar.svg'
db.session.commit()
return jsonify('successfully changed image to default'), 200
return jsonify('invalid format'), 415
return jsonify('not found'), 404
return jsonify('forbidden'), 403
def allowed_file(filename):
    """Return True when *filename* has an extension listed in ALLOWED_EXTENSIONS."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1].lower()
    return extension in ALLOWED_EXTENSIONS
@userblueprint.route("/v1/users/getUserImage/", methods=["GET"])
@jwt_required
def _get_image():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
filename = secure_filename(user.profile_image)
return send_file(os.path.join("image_folder/", filename))
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route("/v1/users/changePassword/", methods=['POST'])
@jwt_required
def _change_password():
json_data = request.get_json()
if not json_data:
return jsonify({'message': 'No input data provided'}), 400
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
password = <PASSWORD>.generate_password_hash(json_data['password'])
user.password = password
db.session.commit()
return jsonify(message="Change password successful"), 200
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route("/v1/users/changeEmail/", methods=['POST'])
@jwt_required
def _change_email():
json_data = request.get_json()
if not json_data:
return jsonify({'message': 'No input data provided'}), 400
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
email = json_data['email']
user.email = email
db.session.commit()
return jsonify(message="Chang email successful"), 200
return jsonify({'message': "Invalid Token"}), 401
## SUBJECT SUBSCRIPTION
@userblueprint.route("/v1/users/subscribeToSubject/<int:subjectid>/",
methods=['POST'])
@jwt_required
def _subscribe_to_subject(subjectid):
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
subject = Subject.query.filter_by(id=subjectid).first()
existing_subscription = Subject_Subscription.query.filter_by(
user_id=user.id, subject_id=subject.id).first()
if existing_subscription:
db.session.delete(existing_subscription)
db.session.commit()
return jsonify(message="Removed subscription",
id=existing_subscription.id,
user_id=user.id,
subject_id=subject.id), 200
subject_subscription = Subject_Subscription(user_id=user.id,
subject_id=subject.id)
db.session.add(subject_subscription)
db.session.commit()
return jsonify(message=True,
id=subject_subscription.id,
user_id=user.id,
subject_id=subject.id), 200
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route('/v1/users/getAllSubjectSubscriptions/', methods=['GET'])
@jwt_required
def _get_subject_subscriptions_all():
    """List every subject subscription in the system; staff only."""
    identity = get_jwt_identity()
    if identity:
        requester = User.query.filter_by(username=identity).first()
        if requester.is_staff:
            # HACK: Super hacky, will be removed when front end is updated
            dump = subjects_subscription_schema.dump(
                Subject_Subscription.query.all(), many=True)
            result = [dump, {}]
            return jsonify({'subject_subs': result}), 200
    return jsonify('forbidden'), 403
@userblueprint.route('/v1/users/getMySubjectSubscriptions/', methods=['GET'])
@jwt_required
def _get_user_subject_subscriptions():
    """List the current user's subject subscriptions."""
    current_user = get_jwt_identity()
    if current_user:
        user = User.query.filter_by(username=current_user).first()
        subject_subs = Subject_Subscription.query.filter_by(
            user_id=user.id).all()
        # HACK: Super hacky, will be removed when front end is updated
        dump = subjects_subscription_schema.dump(subject_subs, many=True)
        result = list()
        result.append(dump)
        result.append(dict())
        try:
            return jsonify(result[0])
        # Fix: was a bare ``except:`` — never swallow SystemExit /
        # KeyboardInterrupt; catch Exception at most.
        except Exception:
            return jsonify([])
## TOPIC SUBSCRIPTION
@userblueprint.route("/v1/users/subscribeToTopic/<int:topicid>/",
methods=['POST'])
@jwt_required
def _subscribe_to_topic(topicid):
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
topic = Topic.query.filter_by(id=topicid).first()
existing_subscription = Topic_Subscription.query.filter_by(
user_id=user.id, topic_id=topic.id).first()
if existing_subscription:
db.session.delete(existing_subscription)
db.session.commit()
return jsonify(message="Removed subscription",
id=existing_subscription.id,
user_id=user.id,
topic_id=topic.id), 200
topic_subscription = Topic_Subscription(user_id=user.id,
topic_id=topic.id)
db.session.add(topic_subscription)
db.session.commit()
return jsonify(message=True,
id=topic_subscription.id,
user_id=user.id,
topic_id=topic.id), 200
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route('/v1/users/getAllTopicSubscriptions/', methods=['GET'])
@jwt_required
def _get_topic_subscriptions_all():
    """List every topic subscription in the system; staff only."""
    identity = get_jwt_identity()
    if identity:
        requester = User.query.filter_by(username=identity).first()
        if requester.is_staff:
            # HACK: Super hacky, will be removed when front end is updated
            dump = topics_subscription_schema.dump(
                Topic_Subscription.query.all(), many=True)
            result = [dump, {}]
            return jsonify({'topic_subs': result}), 200
    return jsonify('forbidden'), 403
@userblueprint.route('/v1/users/getTopicSubscription/<int:topicid>/',
                     methods=['GET'])
@jwt_required
def _get_all_topic_subscription(topicid):
    """Gets all of the users subscribed to the topic subscription if you are an admin."""
    current_user = get_jwt_identity()
    if current_user:
        user = User.query.filter_by(username=current_user).first()
        if user.is_staff:
            topic_sub = Topic_Subscription.query.filter_by(
                topic_id=topicid).first()
            # HACK: Super hacky, will be removed when front end is updated
            dump = topics_subscription_schema.dump(topic_sub, many=False)
            result = list()
            result.append(dump)
            result.append(dict())
            try:
                return jsonify(result[0])
            # Fix: was a bare ``except:`` that also swallowed SystemExit /
            # KeyboardInterrupt.
            except Exception:
                return jsonify([])
        return jsonify('unauthorized'), 403
    return jsonify('unauthorized'), 401
@userblueprint.route('/v1/users/getMyTopicSubscription/<int:topicid>/',
                     methods=['GET'])
@jwt_required
def _get_topic_subscription(topicid):
    """Given a topic id, return the subscription status of the topic."""
    current_user = get_jwt_identity()
    if current_user:
        user = User.query.filter_by(username=current_user).first()
        if user:
            topic_sub = Topic_Subscription.query.filter_by(
                topic_id=topicid, user_id=user.id).first()
            # HACK: Super hacky, will be removed when front end is updated
            dump = topics_subscription_schema.dump(topic_sub, many=False)
            result = list()
            result.append(dump)
            result.append(dict())
            try:
                return jsonify(result[0])
            # Fix: was a bare ``except:`` that also swallowed SystemExit /
            # KeyboardInterrupt.
            except Exception:
                return jsonify([])
        return jsonify('unauthorized'), 403
    return jsonify('unauthorized'), 401
@userblueprint.route('/v1/users/getMyTopicSubscriptions/', methods=['GET'])
@jwt_required
def _get_user_topic_subscription():
    """List every topic subscription owned by the current user."""
    current_user = get_jwt_identity()
    if current_user:
        user = User.query.filter_by(username=current_user).first()
        topic_subs = Topic_Subscription.query.filter_by(user_id=user.id).all()
        # HACK: Super hacky, will be removed when front end is updated
        dump = topics_subscription_schema.dump(topic_subs, many=True)
        result = list()
        result.append(dump)
        result.append(dict())
        try:
            return jsonify(result[0])
        # NOTE(review): bare except below also catches SystemExit/KeyboardInterrupt.
        except:
return jsonify([]) | app/user/views.py | from flask import Blueprint, request, send_file, jsonify, json, flash
from flask_jwt_extended import jwt_required, create_access_token, get_jwt_identity, jwt_refresh_token_required, create_refresh_token
from marshmallow import ValidationError
from werkzeug.utils import secure_filename
from app.extensions import db, bcrypt
from .forms import LoginForm
from .models import User, Subject_Subscription, Topic_Subscription
from app.post.models import Subject, Topic, Post
from .schema import user_schema, users_schema
from .schema import subjects_subscription_schema
from .schema import topics_subscription_schema
import os
import datetime
from urllib.parse import urlparse
ALLOWED_EXTENSIONS = set(['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'])
userblueprint = Blueprint('user', __name__)
@userblueprint.route('/v1/users/signup/', methods=('POST', ))
def _register_user():
    """Create a new user account from the posted JSON payload.

    Returns 400 for an empty body or duplicate email, 422 for a payload
    that fails schema validation, otherwise a success message.
    """
    json_data = request.get_json()
    if not json_data:
        return jsonify({'message': 'No input data provided'}), 400
    # Validate the payload shape before touching the database.
    try:
        user_schema.load(json_data)
    except ValidationError as err:
        return jsonify(err.messages), 422
    duplicateuser = User.query.filter_by(
        email=json_data['email'].lower()).first()
    if duplicateuser:
        return jsonify({'message': 'Duplicate user'}), 400
    user = User(username=json_data['username'].lower(),
                email=json_data['email'].lower(),
                firstname=json_data['firstname'].lower(),
                lastname=json_data['lastname'].lower(),
                # Store a bcrypt hash, never the plaintext password.  The
                # original line held a "<PASSWORD>" anonymization placeholder
                # (a syntax error); restored to match check_password_hash
                # used by the login route.
                password=bcrypt.generate_password_hash(json_data['password']),
                school=json_data['school'].lower())
    user.is_teacher = json_data['is_teacher']
    db.session.add(user)
    db.session.commit()
    return jsonify(message="Successful user creation", username=user.username)
@userblueprint.route('/v1/users/delete/', methods=['POST'])
@jwt_required
def _delete_user():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
User.query.filter_by(id=user.id).delete()
#User.query.filter(User.id == 123).delete()
db.session.commit()
return jsonify(message="Successful account deletion"), 200
return jsonify(message="Invalid token")
@userblueprint.route('/users/', methods=('GET', ))
@jwt_required
def _get_user():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
if user.is_staff:
users = User.query.all()
# HACK: Super hacky, will be removed when front end is updated
dump = users_schema.dump(users, many=True)
result = list()
result.append(dump)
result.append(dict())
return jsonify({'users': result})
return jsonify('forbidden'), 403
@userblueprint.route('/v1/users/login/', methods=('POST', ))
def _login_user():
form = LoginForm()
user = User.query.filter_by(email=form.email.data).first()
if user:
if bcrypt.check_password_hash(user.password, form.password.data):
# authenticate user and resave into db
db.session.add(user)
db.session.commit()
flash('Login requested for user {}'.format(user.email))
expires = datetime.timedelta(days=30)
access_token = create_access_token(
identity=user.username,
expires_delta=expires) # Create access token for user
refresh_token = create_refresh_token(identity=user.username,
expires_delta=expires)
return jsonify(access_token=access_token,
refresh_token=refresh_token), 200
return json.dumps({'Login': False}), 500, {
'ContentType': 'application/json'
}
@userblueprint.route('/v1/users/refresh/', methods=['POST'])
@jwt_refresh_token_required
def refresh():
current_user = get_jwt_identity()
expires = datetime.timedelta(days=30)
access_token = create_access_token(identity=current_user,
expires_delta=expires)
return jsonify(access_token=access_token), 200
@userblueprint.route("/v1/users/logout/", methods=["GET"])
@jwt_required
def _logout():
"""Logout the current user."""
#Could use a blacklist to blacklist tokens but for now we'll wait TODO
# user = current_user
# db.session.add(user)
# db.session.commit()
# logout_user()
@userblueprint.route("/v1/users/auth/", methods=["GET"])
@jwt_required
def _auth():
current_user = get_jwt_identity()
user = User.query.filter_by(username=current_user).first()
if current_user:
return jsonify(logged_in_as=current_user,
user_info={
'email': user.email,
'school': user.school,
'firstname': user.firstname,
'lastname': user.lastname,
'is_staff': user.is_staff
}), 200
return jsonify(logged_in_as=''), 200
@userblueprint.route("/v1/users/setUserImage/", methods=["POST"])
@jwt_required
def _set_image():
current_user = get_jwt_identity()
json_data = request.get_json()
if current_user:
user = User.query.filter_by(username=current_user).first()
if user:
if (urlparse(json_data['url']).scheme == 'http'
or urlparse(json_data['url']).scheme == 'https'):
user.profile_image = json_data['url']
user_posts = Post.query.filter_by(author_id=user.id).all()
for user_post in user_posts:
user_post.author_image = json_data['url']
db.session.commit()
return jsonify('successfully changed image'), 200
if (json_data['url'] == ''):
user.profile_image = 'Avatar.svg'
user_posts = Post.query.filter_by(author_id=user.id).all()
for user_post in user_posts:
user_post.author_image = 'Avatar.svg'
db.session.commit()
return jsonify('successfully changed image to default'), 200
return jsonify('invalid format'), 415
return jsonify('not found'), 404
return jsonify('forbidden'), 403
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS
@userblueprint.route("/v1/users/getUserImage/", methods=["GET"])
@jwt_required
def _get_image():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
filename = secure_filename(user.profile_image)
return send_file(os.path.join("image_folder/", filename))
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route("/v1/users/changePassword/", methods=['POST'])
@jwt_required
def _change_password():
json_data = request.get_json()
if not json_data:
return jsonify({'message': 'No input data provided'}), 400
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
password = <PASSWORD>.generate_password_hash(json_data['password'])
user.password = password
db.session.commit()
return jsonify(message="Change password successful"), 200
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route("/v1/users/changeEmail/", methods=['POST'])
@jwt_required
def _change_email():
json_data = request.get_json()
if not json_data:
return jsonify({'message': 'No input data provided'}), 400
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
email = json_data['email']
user.email = email
db.session.commit()
return jsonify(message="Chang email successful"), 200
return jsonify({'message': "Invalid Token"}), 401
## SUBJECT SUBSCRIPTION
@userblueprint.route("/v1/users/subscribeToSubject/<int:subjectid>/",
methods=['POST'])
@jwt_required
def _subscribe_to_subject(subjectid):
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
subject = Subject.query.filter_by(id=subjectid).first()
existing_subscription = Subject_Subscription.query.filter_by(
user_id=user.id, subject_id=subject.id).first()
if existing_subscription:
db.session.delete(existing_subscription)
db.session.commit()
return jsonify(message="Removed subscription",
id=existing_subscription.id,
user_id=user.id,
subject_id=subject.id), 200
subject_subscription = Subject_Subscription(user_id=user.id,
subject_id=subject.id)
db.session.add(subject_subscription)
db.session.commit()
return jsonify(message=True,
id=subject_subscription.id,
user_id=user.id,
subject_id=subject.id), 200
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route('/v1/users/getAllSubjectSubscriptions/', methods=['GET'])
@jwt_required
def _get_subject_subscriptions_all():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
is_staff = user.is_staff
if is_staff:
subject_subs = Subject_Subscription.query.all()
# HACK: Super hacky, will be removed when front end is updated
dump = subjects_subscription_schema.dump(subject_subs, many=True)
result = list()
result.append(dump)
result.append(dict())
return jsonify({'subject_subs': result}), 200
return jsonify('forbidden'), 403
@userblueprint.route('/v1/users/getMySubjectSubscriptions/', methods=['GET'])
@jwt_required
def _get_user_subject_subscriptions():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
subject_subs = Subject_Subscription.query.filter_by(
user_id=user.id).all()
# HACK: Super hacky, will be removed when front end is updated
dump = subjects_subscription_schema.dump(subject_subs, many=True)
result = list()
result.append(dump)
result.append(dict())
try:
return jsonify(result[0])
except:
return jsonify([])
## TOPIC SUBSCRIPTION
@userblueprint.route("/v1/users/subscribeToTopic/<int:topicid>/",
methods=['POST'])
@jwt_required
def _subscribe_to_topic(topicid):
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
topic = Topic.query.filter_by(id=topicid).first()
existing_subscription = Topic_Subscription.query.filter_by(
user_id=user.id, topic_id=topic.id).first()
if existing_subscription:
db.session.delete(existing_subscription)
db.session.commit()
return jsonify(message="Removed subscription",
id=existing_subscription.id,
user_id=user.id,
topic_id=topic.id), 200
topic_subscription = Topic_Subscription(user_id=user.id,
topic_id=topic.id)
db.session.add(topic_subscription)
db.session.commit()
return jsonify(message=True,
id=topic_subscription.id,
user_id=user.id,
topic_id=topic.id), 200
return jsonify({'message': "Invalid Token"}), 401
@userblueprint.route('/v1/users/getAllTopicSubscriptions/', methods=['GET'])
@jwt_required
def _get_topic_subscriptions_all():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
is_staff = user.is_staff
if is_staff:
topic_subs = Topic_Subscription.query.all()
# HACK: Super hacky, will be removed when front end is updated
dump = topics_subscription_schema.dump(topic_subs, many=True)
result = list()
result.append(dump)
result.append(dict())
return jsonify({'topic_subs': result}), 200
return jsonify('forbidden'), 403
@userblueprint.route('/v1/users/getTopicSubscription/<int:topicid>/',
methods=['GET'])
@jwt_required
def _get_all_topic_subscription(topicid):
"""Gets all of the users subscribed to the topic subscription if you are an admin."""
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
if user.is_staff:
topic_sub = Topic_Subscription.query.filter_by(
topic_id=topicid).first()
# HACK: Super hacky, will be removed when front end is updated
dump = topics_subscription_schema.dump(topic_sub, many=False)
result = list()
result.append(dump)
result.append(dict())
try:
return jsonify(result[0])
except:
return jsonify([])
return jsonify('unauthorized'), 403
return jsonify('unauthorized'), 401
@userblueprint.route('/v1/users/getMyTopicSubscription/<int:topicid>/',
methods=['GET'])
@jwt_required
def _get_topic_subscription(topicid):
"""Given a topic id, return the subscription status of the topic."""
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
if user:
topic_sub = Topic_Subscription.query.filter_by(
topic_id=topicid, user_id=user.id).first()
# HACK: Super hacky, will be removed when front end is updated
dump = topics_subscription_schema.dump(topic_sub, many=False)
result = list()
result.append(dump)
result.append(dict())
try:
return jsonify(result[0])
except:
return jsonify([])
return jsonify('unauthorized'), 403
return jsonify('unauthorized'), 401
@userblueprint.route('/v1/users/getMyTopicSubscriptions/', methods=['GET'])
@jwt_required
def _get_user_topic_subscription():
current_user = get_jwt_identity()
if current_user:
user = User.query.filter_by(username=current_user).first()
topic_subs = Topic_Subscription.query.filter_by(user_id=user.id).all()
# HACK: Super hacky, will be removed when front end is updated
dump = topics_subscription_schema.dump(topic_subs, many=True)
result = list()
result.append(dump)
result.append(dict())
try:
return jsonify(result[0])
except:
return jsonify([]) | 0.311322 | 0.053999 |
from pytorch_transformers import XLNetModel, XLNetConfig
from onmt.encoders.transformer import EncoderBase
import os
from programmingalpha.models import expandEmbeddingByN
class OnmtXLNetEncoder(EncoderBase):
    '''
    OpenNMT-compatible encoder wrapping a (optionally pretrained) XLNet model.

    Returns:
        (torch.FloatTensor, torch.FloatTensor):

        * embeddings ``(src_len, batch_size, model_dim)``
        * memory_bank ``(src_len, batch_size, model_dim)``
    '''
    def __init__(self, model_path):
        """Load config (and weights, if present) from *model_path* and expand the vocab embedding."""
        super(OnmtXLNetEncoder, self).__init__()
        # Build from the saved config; load checkpoint weights only when the
        # file exists next to it, otherwise start from a random init.
        config=XLNetConfig.from_json_file(os.path.join( model_path, "config.json") )
        pretrained_dict=os.path.join( model_path, "pytorch_model.bin")
        if os.path.exists(pretrained_dict):
            model=XLNetModel.from_pretrained(pretrained_model_name_or_path=pretrained_dict, config=config)
            print("init XLNet model with {} weights".format(len(model.state_dict())))
        else:
            model=XLNetModel(config)
        # Grow the vocabulary embedding: 4 extra rows, then 2 more with
        # ``last=True`` — presumably project-specific special tokens; the exact
        # placement semantics live in expandEmbeddingByN (TODO confirm).
        model.word_embedding=expandEmbeddingByN(model.word_embedding, 4)
        model.word_embedding=expandEmbeddingByN(model.word_embedding, 2, last=True)
        self.encoder=model
        print("***"*20)
    def forward(self, src, lengths=None):
        """
        Args:
            src (LongTensor):
                padded sequences of sparse indices ``(src_len, batch, nfeat)``
            lengths (LongTensor): length of each sequence ``(batch,)``
        """
        # Drop the single feature axis and switch to batch-first for XLNet.
        inputids=src.squeeze(2).transpose(0,1).contiguous()
        outputs=self.encoder(input_ids=inputids)
        # NOTE(review): outputs[2] is only present when the config enables
        # hidden-state output — confirm config.json sets it.  outputs[2][-1]
        # is taken as the embedding; outputs[0] is the final sequence output.
        emb=outputs[2][-1]
        memory_bank=outputs[0]
        # Back to (src_len, batch, dim), the layout OpenNMT encoders return.
        emb=emb.transpose(0,1).contiguous()
        memory_bank=memory_bank.transpose(0,1).contiguous()
        return emb, memory_bank, lengths
def getWordEmbeddingFromXLNetEncoder(model: OnmtXLNetEncoder):
    """Return the (expanded) word-embedding module of the wrapped XLNet."""
    wrapped = model.encoder
    return wrapped.word_embedding
def buildXLNet(**kwargs):
    """Build an :class:`OnmtXLNetEncoder`.

    Uses ``kwargs["model_path"]`` when given, otherwise falls back to the
    project's XLNetBaseCased checkpoint location.
    """
    if "model_path" not in kwargs:
        from programmingalpha import AlphaPathLookUp
        kwargs["model_path"] = AlphaPathLookUp.XLNetBaseCased
    return OnmtXLNetEncoder(kwargs["model_path"])
from onmt.encoders.transformer import EncoderBase
import os
from programmingalpha.models import expandEmbeddingByN
class OnmtXLNetEncoder(EncoderBase):
'''
Returns:
(torch.FloatTensor, torch.FloatTensor):
* embeddings ``(src_len, batch_size, model_dim)``
* memory_bank ``(src_len, batch_size, model_dim)``
'''
def __init__(self, model_path):
super(OnmtXLNetEncoder, self).__init__()
config=XLNetConfig.from_json_file(os.path.join( model_path, "config.json") )
pretrained_dict=os.path.join( model_path, "pytorch_model.bin")
if os.path.exists(pretrained_dict):
model=XLNetModel.from_pretrained(pretrained_model_name_or_path=pretrained_dict, config=config)
print("init XLNet model with {} weights".format(len(model.state_dict())))
else:
model=XLNetModel(config)
model.word_embedding=expandEmbeddingByN(model.word_embedding, 4)
model.word_embedding=expandEmbeddingByN(model.word_embedding, 2, last=True)
self.encoder=model
#print(model)
print("***"*20)
def forward(self, src, lengths=None):
"""
Args:
src (LongTensor):
padded sequences of sparse indices ``(src_len, batch, nfeat)``
lengths (LongTensor): length of each sequence ``(batch,)``
"""
inputids=src.squeeze(2).transpose(0,1).contiguous()
outputs=self.encoder(input_ids=inputids)
#print(len(outputs))
#print(outputs)
emb=outputs[2][-1]
memory_bank=outputs[0]
emb=emb.transpose(0,1).contiguous()
memory_bank=memory_bank.transpose(0,1).contiguous()
#print("src--> outs", src.size(), emb.size(), memory_bank.size())
return emb, memory_bank, lengths
def getWordEmbeddingFromXLNetEncoder(model:OnmtXLNetEncoder):
return model.encoder.word_embedding
def buildXLNet(**kwargs):
if "model_path" not in kwargs:
from programmingalpha import AlphaPathLookUp
kwargs["model_path"] = AlphaPathLookUp.XLNetBaseCased
encoder=OnmtXLNetEncoder(kwargs["model_path"])
return encoder | 0.621541 | 0.240006 |
import numpy as np
from region import Region1, Region2
class Model():
    """Regression tree built by exhaustive binary splitting, with
    cost-complexity pruning.

    NOTE(review): regions are provided by ``Region1``/``Region2`` and appear
    to expose anytree-style ``parent``/``children``/``is_leaf`` plus
    ``getPopulation``/``getLoss``/``getLeaves``/``getSize`` — confirm
    against region.py.
    """
    def __init__(self, minSize, eval, norm, alpha):
        # minSize: stop splitting once a region's population is this small.
        self.minSize = minSize
        self.axis = None
        self.subAxis = None
        # Root of the fitted tree; set by classify().
        self.root = None
        # eval: maps an element to its target value (used in evaluate2).
        self.eval = eval
        # norm: distance function used for loss/accuracy computations.
        self.norm = norm
        # alpha: per-leaf complexity penalty in the pruning criterion.
        self.alpha = alpha
    def bestSeparation(self, parentRegion):
        """Exhaustively search feature ``j`` and threshold ``s`` minimizing
        the summed loss of the two child regions.

        Returns the best ``(Region1, Region2)`` pair, or ``(None, None)``
        when no finite-loss split was found.
        """
        opt = np.inf
        jOpt = None
        sOpt = None
        for j in range(len(parentRegion.getPopulation()[0])):
            svalues = np.sort(parentRegion.getPopulation()[:, j])
            maxs = max(svalues)
            n = len(svalues)
            # Drop the maximal value(s): splitting at the column maximum
            # would leave one child region empty.
            while n > 0 and svalues[-1] == maxs:
                svalues = svalues[:-1]
                n -= 1
            for s in svalues:
                r1 = Region1(self, j, s, elements=parentRegion.getPopulation())
                r2 = Region2(self, j, s, elements=parentRegion.getPopulation())
                loss = r1.getLoss(r1.value) + r2.getLoss(r2.value)
                if loss < opt:
                    opt = loss
                    jOpt = j
                    sOpt = s
        if jOpt != None:
            # Rebuild the winning pair (the loop-local regions were
            # discarded each iteration).
            r1Opt = Region1(self, jOpt, sOpt, elements=parentRegion.getPopulation())
            r2Opt = Region2(self, jOpt, sOpt, elements=parentRegion.getPopulation())
            return r1Opt, r2Opt
        else:
            return None, None
    def critereCout(self, t, firstTerm=False):
        """Cost-complexity criterion: sum of leaf losses plus
        ``alpha * number_of_leaves``.

        With ``firstTerm=True`` only the loss sum (no penalty) is returned.
        """
        leaves = t.getLeaves()
        s = 0
        n = len(leaves)
        print("n :", n)
        for node in leaves:
            s += node.getLoss(node.value)
        if not firstTerm:
            return s + self.alpha * n
        else:
            return s
    def copy(self, region):
        """Deep-copy a region subtree, rebuilding Region1/Region2 nodes."""
        if type(region) == Region1:
            r = Region1(self, region.j, region.s, elements=region.getPopulation())
        else:
            r = Region2(self, region.j, region.s, elements=region.getPopulation())
        if not region.is_leaf:
            # Recursively copy children and re-attach them to the new node.
            for node in region.children:
                newnode = self.copy(node)
                newnode.parent = r
        return r
    def elager(self, t):
        """Prune the tree: repeatedly collapse the leaf pair whose removal
        minimizes total leaf loss, keeping the best tree seen under the
        cost-complexity criterion.
        """
        tOpt = self.copy(t)
        currentTree = self.copy(t)
        cOpt = self.critereCout(t)
        while not currentTree.is_leaf:
            leaves = currentTree.getLeaves()
            firstTerm = np.inf
            bestIndex = 0
            # Step 2 at a time: sibling leaves come in pairs, collapsing
            # either one removes the same parent's children.
            for i in range(0, len(leaves), 2):
                p = leaves[i].parent
                savechildren = [node for node in p.children]
                # Temporarily collapse this parent to score the pruning.
                p.children = []
                fs = self.critereCout(currentTree, firstTerm=True)
                if fs < firstTerm:
                    firstTerm = fs
                    bestIndex = i
                # Restore the children before trying the next candidate.
                p.children = savechildren
            # Permanently collapse the best candidate.
            leaves[bestIndex].parent.children = []
            c = self.critereCout(currentTree)
            print("c :", c)
            print("cOpt :", cOpt)
            if c < cOpt:
                print("--------------------------------------------- c :", c)
                cOpt = c
                tOpt = self.copy(currentTree)
        return tOpt
    def classify(self, data):
        """Fit the tree on ``data``: grow by recursive best splits until
        regions reach ``minSize``, then prune with :meth:`elager`.
        """
        # Root region covers everything (threshold +inf on feature 1).
        root = Region1(self, 1, np.inf, elements=data)
        def divideRecursivelly(region):
            if region.getSize() > self.minSize:
                print(".................")
                r1, r2 = self.bestSeparation(region)
                if r1:
                    r1.parent = region
                    divideRecursivelly(r1)
                    r2.parent = region
                    divideRecursivelly(r2)
        divideRecursivelly(root)
        root = self.elager(root)
        self.root = root
        return root
    def evaluate(self, elt):
        """Route a single element down the fitted tree and return the leaf
        region it lands in (prints a relative-accuracy diagnostic).
        """
        def recEvaluate(region):
            if not region.children:
                return region
            else:
                r1, r2 = region.children
                if elt[r1.j] <= r1.s:
                    return recEvaluate(r1)
                else:
                    return recEvaluate(r2)
        sol = recEvaluate(self.root)
        norm1 = self.norm(elt - sol.value)
        norm2 = self.norm(sol.value)
        print("region : ", sol, " acc : ", 1 - norm1 / norm2)
        return sol
    def evaluate2(self, elements):
        """Route every element down the tree; return the list of predicted
        leaf values and the mean loss against ``self.eval``.
        """
        solList = []
        loss = 0
        def recEvaluate(elt, region):
            if not region.children:
                return region
            else:
                r1, r2 = region.children
                if elt[r1.j] <= r1.s:
                    return recEvaluate(elt, r1)
                else:
                    return recEvaluate(elt, r2)
        for elt in elements:
            sol = recEvaluate(elt, self.root)
            solList.append(sol.value)
            loss += self.norm(self.eval(elt) - sol.value)
        loss = loss / len(elements)
        print("loss : ", loss)
        return solList, loss
from region import Region1, Region2
class Model():
def __init__(self, minSize, eval, norm, alpha):
self.minSize = minSize
self.axis = None
self.subAxis = None
self.root = None
self.eval = eval
self.norm = norm
self.alpha = alpha
def bestSeparation(self, parentRegion):
opt = np.inf
jOpt = None
sOpt = None
for j in range(len(parentRegion.getPopulation()[0])):
svalues = np.sort(parentRegion.getPopulation()[:, j])
maxs = max(svalues)
n = len(svalues)
while n > 0 and svalues[-1] == maxs:
svalues = svalues[:-1]
n -= 1
for s in svalues:
r1 = Region1(self, j, s, elements=parentRegion.getPopulation())
r2 = Region2(self, j, s, elements=parentRegion.getPopulation())
loss = r1.getLoss(r1.value) + r2.getLoss(r2.value)
if loss < opt:
opt = loss
jOpt = j
sOpt = s
if jOpt != None:
r1Opt = Region1(self, jOpt, sOpt, elements=parentRegion.getPopulation())
r2Opt = Region2(self, jOpt, sOpt, elements=parentRegion.getPopulation())
return r1Opt, r2Opt
else:
return None, None
def critereCout(self, t, firstTerm=False):
leaves = t.getLeaves()
s = 0
n = len(leaves)
print("n :", n)
for node in leaves:
s += node.getLoss(node.value)
if not firstTerm:
return s + self.alpha * n
else:
return s
def copy(self, region):
if type(region) == Region1:
r = Region1(self, region.j, region.s, elements=region.getPopulation())
else:
r = Region2(self, region.j, region.s, elements=region.getPopulation())
if not region.is_leaf:
for node in region.children:
newnode = self.copy(node)
newnode.parent = r
return r
def elager(self, t):
tOpt = self.copy(t)
currentTree = self.copy(t)
cOpt = self.critereCout(t)
while not currentTree.is_leaf:
leaves = currentTree.getLeaves()
firstTerm = np.inf
bestIndex = 0
for i in range(0, len(leaves), 2):
p = leaves[i].parent
savechildren = [node for node in p.children]
p.children = []
fs = self.critereCout(currentTree, firstTerm=True)
if fs < firstTerm:
firstTerm = fs
bestIndex = i
p.children = savechildren
leaves[bestIndex].parent.children = []
c = self.critereCout(currentTree)
print("c :", c)
print("cOpt :", cOpt)
if c < cOpt:
print("--------------------------------------------- c :", c)
cOpt = c
tOpt = self.copy(currentTree)
return tOpt
def classify(self, data):
root = Region1(self, 1, np.inf, elements=data)
def divideRecursivelly(region):
if region.getSize() > self.minSize:
print(".................")
r1, r2 = self.bestSeparation(region)
if r1:
r1.parent = region
divideRecursivelly(r1)
r2.parent = region
divideRecursivelly(r2)
divideRecursivelly(root)
root = self.elager(root)
self.root = root
return root
def evaluate(self, elt):
def recEvaluate(region):
if not region.children:
return region
else:
r1, r2 = region.children
if elt[r1.j] <= r1.s:
return recEvaluate(r1)
else:
return recEvaluate(r2)
sol = recEvaluate(self.root)
norm1 = self.norm(elt - sol.value)
norm2 = self.norm(sol.value)
print("region : ", sol, " acc : ", 1 - norm1 / norm2)
return sol
def evaluate2(self, elements):
solList = []
loss = 0
def recEvaluate(elt, region):
if not region.children:
return region
else:
r1, r2 = region.children
if elt[r1.j] <= r1.s:
return recEvaluate(elt, r1)
else:
return recEvaluate(elt, r2)
for elt in elements:
sol = recEvaluate(elt, self.root)
solList.append(sol.value)
loss += self.norm(self.eval(elt) - sol.value)
loss = loss / len(elements)
print("loss : ", loss)
return solList, loss | 0.342132 | 0.238916 |
import logging
import time
from libmproxy.protocol.http import decoded
import re
from libmproxy.protocol.http import HTTPResponse
from netlib.odict import ODictCaseless
class client_status_handler:
    """Tracks per-client certificate-install status in a flat text DB.

    Each DB line has the form ``"<client_ip> <status>"`` where status is a
    two-letter code (``no`` = not yet handled; the proxy hooks also use
    ``dl``/``in``).
    """

    def __init__(self):
        # Flat text file used as the persistent store.
        self.db_file_name = 'cert-db.dat'

    def get_db(self):
        """Return all DB lines (with trailing newlines)."""
        with open(self.db_file_name, 'r') as cert_db_file:
            return cert_db_file.readlines()

    def add_to_db(self, client_ip):
        """Append ``client_ip`` with the default status ``no``."""
        with open(self.db_file_name, 'a') as cert_db_file:
            cert_db_file.write(client_ip + " no\n")

    def change_status(self, client_ip, status):
        """Rewrite the DB, replacing the status stored for ``client_ip``.

        ``line[:-3]`` strips the old two-letter status plus the newline, so
        statuses are assumed to always be exactly two characters.
        """
        cert_db = self.get_db()
        # BUG FIX: the write handle was previously never closed, so the
        # rewritten file could be left unflushed; ``with`` guarantees
        # close-on-exit.
        with open(self.db_file_name, 'w') as cert_db_file:
            for line in cert_db:
                if line.split()[0] == client_ip:
                    cert_db_file.write(line[:-3] + status + "\n")
                else:
                    cert_db_file.write(line)

    def check_status(self, client_ip):
        """Return the stored status for ``client_ip``.

        Unknown clients are appended with status ``no`` and ``"no"`` is
        returned.
        """
        for line in self.get_db():
            if line.split()[0] == client_ip:
                return line.split()[1]
        self.add_to_db(client_ip)
        return "no"
#=============================================
# Module-level singleton shared by the request/response hooks below.
csh = client_status_handler()
def response(context, flow):
    """mitmproxy response hook: currently only traces request metadata.

    The original interception logic (certificate-page injection / request
    replay) is disabled pending a front-end update.
    """
    try:
        logging.debug("response")
        logging.debug(flow.request.host)
        logging.debug(flow.request.pretty_host(hostheader=True))
        logging.debug(flow.request.path)
        logging.debug(flow.server_conn.ssl_established)
        logging.debug("=======================================================")
    except Exception as e:
        # Any failure here is a coding error in the hook itself.
        logging.debug("CHECK CODE, IDIOT!!!!!!!!!!!")
        logging.debug(type(e))
        logging.debug(e)
def request(context, flow):
    """mitmproxy request hook: serves the CA-cert download page to new clients.

    A client whose DB status is still "no" receives the contents of cert.txt
    in response to any plain-HTTP request (except requests for the cert files
    themselves). Successfully reaching https://www.google.com marks the
    client's certificate as installed.
    """
    try:
        logging.debug("request")
        logging.debug(flow.request.host)
        logging.debug(flow.request.pretty_host(hostheader=True))
        logging.debug(flow.request.path)
        client_ip = str(flow.client_conn.address).split("'")[1]
        client_status = csh.check_status(client_ip)
        if client_status == "no":
            to_trust_https = flow.request.pretty_host(hostheader=True) == "www.google.com"
            logging.debug("to_trust_https " + str(to_trust_https))
            logging.debug("ssl_established " + str(flow.server_conn.ssl_established))
            if to_trust_https and flow.server_conn.ssl_established:
                # TLS to a trusted host worked => our certificate is installed.
                csh.change_status(client_ip, "in")
                client_status = "in"
        # Never intercept requests for the certificate files themselves.
        skip = flow.request.path in ("/mipt-telecom.p12", "/mipt-telecom.pem")
        logging.debug("skip " + str(skip))
        if client_status == "no" and not skip and not flow.server_conn.ssl_established:
            with open("cert.txt", "r") as cert_file:
                cert_str = cert_file.read()
            resp = HTTPResponse([1, 1], 200, "OK",
                                ODictCaseless([["Content-Type", "text/html"]]), cert_str)
            flow.reply(resp)
        logging.debug(flow.server_conn.ssl_established)
    except Exception as e:
        # Any failure here is a coding error in the hook itself.
        logging.debug("CHECK CODE, IDIOT!!!!!!!!!!!")
        logging.debug(type(e))
        logging.debug(e)
def start(context, argv):
    """mitmproxy entry point: route debug logging to log.log."""
    logging.basicConfig(filename="log.log", level=logging.DEBUG)
    logging.debug("============================================\n")
    logging.debug(time.time())
    logging.debug("Startup:\n")
import time
from libmproxy.protocol.http import decoded
import re
from libmproxy.protocol.http import HTTPResponse
from netlib.odict import ODictCaseless
class client_status_handler:
def __init__ (self):
self.db_file_name = 'cert-db.dat'
def get_db (self):
cert_db_file = open(self.db_file_name, 'r')
cert_db = cert_db_file.readlines()
cert_db_file.close()
return cert_db
def add_to_db (self, client_ip):
with open(self.db_file_name, 'a') as cert_db_file:
cert_db_file.write(client_ip + " no\n")
def change_status (self, client_ip, status):
cert_db = self.get_db()
cert_db_file = open(self.db_file_name, 'w')
for i in range(len(cert_db)):
if (((cert_db[i]).split())[0] == client_ip):
cert_db_file.write(cert_db[i][:-3] + status + "\n")
else:
cert_db_file.write(cert_db[i])
def check_status (self, client_ip):
cert_db = self.get_db()
status = "no"
for i in range(len(cert_db)):
if (((cert_db[i]).split())[0] == client_ip):
status = ((cert_db[i]).split())[1]
return status
self.add_to_db(client_ip)
return status
#=============================================
csh = client_status_handler()
def response(context, flow):
try:
logging.debug("response")
logging.debug(flow.request.host)
logging.debug(flow.request.pretty_host(hostheader=True))
logging.debug(flow.request.path)
logging.debug(flow.server_conn.ssl_established)
# logging.debug(flow.request.scheme)
# client_status = csh.check_status(str(flow.client_conn.address).split("'")[1])
# skip = flow.request.host == "192.168.3.11"
# skip = skip or flow.request.host == "192.168.3.11"
# logging.debug(flow.request.host)
# if (not skip and client_status == "dl" and not flow.server_conn.ssl_established):
# logging.debug("replay")
# logging.debug(flow.request.pretty_host(hostheader=True))
# context.kill_flow(flow)
# logging.debug("replay")
# with open("cert.txt", "r") as cert_file:
# cert_str = cert_file.read()
# resp = HTTPResponse([1, 1], 200, "OK", ODictCaseless([["Content-Type", "text/html"]]), cert_str)
# flow.reply(resp)
# csh.change_status(str(flow.client_conn.address).split("'")[1], "dl")
# f = context.duplicate_flow(flow)
# f.request.host = "infosec-216.github.io"
# f.request.update_host_header()
# context.replay_request(f)
# else:
# csh.change_status(str(flow.client_conn.address).split("'")[1], "in")
# with decoded(flow.response):
# if ('text/html' in flow.response.headers["content-type"][0]):
# flow.response.headers["content-type"] = ["text/html; charset=uft-8"]
# with open("cert.txt", "r") as cert_file:
# cert_str = cert_file.read()
# flow.response.content = cert_str
# csh.change_status(str(flow.client_conn.address).split("'")[1], "dl")
# if (client_status == "dl"):
# with decoded(flow.response):
# if ('text/html' in flow.response.headers["content-type"][0]):
# flow.response.headers["content-type"] = ["text/html; charset=uft-8"]
# with open("hello-html.txt", "r") as cert_file:
# cert_str = cert_file.read()
# flow.response.content = cert_str
# csh.change_status(str(flow.client_conn.address).split("'")[1], "in")
logging.debug("=======================================================")
except Exception as e:
logging.debug("CHECK CODE, IDIOT!!!!!!!!!!!")
logging.debug(type(e))
logging.debug(e)
def request(context, flow):
try:
logging.debug("request")
logging.debug(flow.request.host)
logging.debug(flow.request.pretty_host(hostheader=True))
logging.debug(flow.request.path)
client_status = csh.check_status(str(flow.client_conn.address).split("'")[1])
if (client_status == "no"):
to_trust_https = flow.request.pretty_host(hostheader=True) == "www.google.com"
logging.debug("to_trust_https " + str(to_trust_https))
logging.debug("ssl_established " + str(flow.server_conn.ssl_established))
if (to_trust_https and flow.server_conn.ssl_established):
csh.change_status(str(flow.client_conn.address).split("'")[1], "in")
client_status = "in"
skip = flow.request.path == "/mipt-telecom.p12"
skip = skip or flow.request.path == "/mipt-telecom.pem"
logging.debug("skip " + str(skip))
if (client_status == "no" and not skip and not flow.server_conn.ssl_established):
with open("cert.txt", "r") as cert_file:
cert_str = cert_file.read()
resp = HTTPResponse([1, 1], 200, "OK", ODictCaseless([["Content-Type", "text/html"]]), cert_str)
flow.reply(resp)
# flow.request.host = "infosec-216.github.io"
# flow.request.update_host_header()
# csh.change_status(str(flow.client_conn.address).split("'")[1], "dl")
logging.debug(flow.server_conn.ssl_established)
except Exception as e:
logging.debug("CHECK CODE, IDIOT!!!!!!!!!!!")
logging.debug(type(e))
logging.debug(e)
def start (context, argv):
logging.basicConfig(filename="log.log",level=logging.DEBUG)
logging.debug("============================================\n")
logging.debug(time.time())
logging.debug("Startup:\n") | 0.222447 | 0.057945 |
import functools
import flask
import structure
import utils
blueprint = flask.Blueprint("user", __name__) # pylint: disable=invalid-name
# Permission expansion map: granting the key permission implies every
# permission listed in the value tuple.
PERMISSIONS = {
    "DATA_EDIT": ("DATA_EDIT", "USER_ADD", "USER_SEARCH"),
    "OWNERS_READ": ("OWNERS_READ",),
    "USER_ADD": ("USER_ADD",),
    "USER_SEARCH": ("USER_SEARCH",),
    "USER_MANAGEMENT": ("USER_MANAGEMENT", "USER_ADD", "USER_SEARCH"),
    "DATA_MANAGEMENT": ("DATA_EDIT", "OWNERS_READ", "DATA_MANAGEMENT"),
}
# Decorators
def login_required(func):
    """
    Confirm that the user is logged in.

    Otherwise abort with status 401 Unauthorized.
    """

    @functools.wraps(func)
    def wrap(*args, **kwargs):
        if flask.g.current_user:
            return func(*args, **kwargs)
        flask.abort(status=401)

    return wrap
# requests
@blueprint.route("/permissions")
def get_permission_info():
"""Get a list of all permission types."""
return utils.response_json({"permissions": list(PERMISSIONS.keys())})
@blueprint.route("")
def list_users():
"""List all users."""
perm_status = utils.req_check_permissions(["USER_SEARCH"])
if perm_status != 200:
flask.abort(status=perm_status)
fields = {"api_key": 0, "api_salt": 0}
if not utils.req_has_permission("USER_MANAGEMENT"):
fields["auth_ids"] = 0
fields["permissions"] = 0
result = tuple(flask.g.db["users"].find(projection=fields))
return utils.response_json({"users": result})
# requests
@blueprint.route("/me")
def get_current_user_info():
"""
List basic information about the current user.
Returns:
flask.Response: json structure for the user
"""
data = flask.g.current_user
outstructure = {
"_id": "",
"affiliation": "",
"auth_ids": [],
"email": "",
"contact": "",
"name": "",
"orcid": "",
"permissions": [],
"url": "",
}
if data:
for field in outstructure:
outstructure[field] = data[field]
outstructure["permissions"] = utils.prepare_permissions(outstructure["permissions"])
return utils.response_json({"user": outstructure})
# requests
@blueprint.route("/<identifier>/apikey", methods=["POST"])
@login_required
def gen_new_api_key(identifier: str = None):
"""
Generate a new API key for the provided or current user.
Args:
identifier (str): The user identifier.
Returns:
flask.Response: The new API key
"""
if identifier != flask.g.current_user["_id"]:
perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
if perm_status != 200:
flask.abort(status=perm_status)
user_data = utils.req_get_entry("users", identifier)
if not user_data:
flask.abort(status=404)
apikey = utils.gen_api_key()
new_hash = utils.gen_api_key_hash(apikey.key, apikey.salt)
new_values = {"api_key": new_hash, "api_salt": apikey.salt}
user_data.update(new_values)
result = flask.g.db["users"].update_one({"_id": identifier}, {"$set": new_values})
if not result.acknowledged:
flask.current_app.logger.error("Updating API key for user %s failed", identifier)
flask.Response(status=500)
else:
utils.make_log("user", "edit", "New API key", user_data)
return utils.response_json({"key": apikey.key})
@blueprint.route("/<identifier>", methods=["GET"])
def get_user_data(identifier: str):
"""
Get information about a user.
Args:
identifier (str): The user identifier.
Returns:
flask.Response: Information about the user as json.
"""
perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
if perm_status != 200:
flask.abort(status=perm_status)
user_info = utils.req_get_entry("users", identifier)
if not user_info:
flask.abort(status=404)
# The hash and salt should never leave the system
del user_info["api_key"]
del user_info["api_salt"]
user_info["permissions"] = utils.prepare_permissions(user_info["permissions"])
return utils.response_json({"user": user_info})
@blueprint.route("", methods=["POST"])
def add_user():
"""
Add a user.
Returns:
flask.Response: Information about the user as json.
"""
perm_status = utils.req_check_permissions(["USER_ADD"])
if perm_status != 200:
flask.abort(status=perm_status)
new_user = structure.user()
jsondata = flask.request.json
if not jsondata.get("user") or not isinstance(jsondata["user"], dict):
flask.abort(status=400)
indata = jsondata["user"]
validation = utils.basic_check_indata(
indata, new_user, ("_id", "api_key", "api_salt", "auth_ids")
)
if not validation.result:
flask.abort(status=validation.status)
indata = utils.prepare_for_db(indata)
if not indata:
flask.abort(status=400)
if "email" not in indata:
flask.current_app.logger.debug("Email must be set")
flask.abort(status=400)
old_user = flask.g.db["users"].find_one({"email": indata["email"]})
if old_user:
flask.current_app.logger.debug("User already exists")
flask.abort(status=400)
if not utils.req_has_permission("USER_MANAGEMENT") and "permissions" in indata:
flask.current_app.logger.debug("USER_MANAGEMENT required for permissions")
flask.abort(403)
new_user.update(indata)
new_user["auth_ids"] = [new_user["email"]]
result = utils.req_commit_to_db("users", "add", new_user)
if not result.log or not result.data:
flask.abort(status=500)
return utils.response_json({"_id": result.ins_id})
@blueprint.route("/<identifier>", methods=["DELETE"])
def delete_user(identifier: str):
"""
Delete a user.
Args:
identifier (str): The user identifier.
Returns:
flask.Response: Response code.
"""
perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
if perm_status != 200:
flask.abort(status=perm_status)
user_info = utils.req_get_entry("users", identifier)
if not user_info:
flask.abort(status=404)
result = utils.req_commit_to_db("users", "delete", {"_id": identifier})
if not result.log or not result.data:
flask.abort(status=500)
return flask.Response(status=200)
@blueprint.route("/me", methods=["PATCH"])
@login_required
def update_current_user_info():
"""
Update the information about the current user.
Returns:
flask.Response: Response code.
"""
user_data = flask.g.current_user
jsondata = flask.request.json
if not jsondata.get("user") or not isinstance(jsondata["user"], dict):
flask.abort(status=400)
indata = jsondata["user"]
validation = utils.basic_check_indata(
indata,
user_data,
("_id", "api_key", "api_salt", "auth_ids", "email", "permissions"),
)
if not validation.result:
flask.abort(status=validation.status)
is_different = False
for field in indata:
if indata[field] != user_data[field]:
is_different = True
break
user_data.update(indata)
if is_different:
result = utils.req_commit_to_db("users", "edit", user_data)
if not result.log or not result.data:
flask.abort(status=500)
return flask.Response(status=200)
@blueprint.route("/<identifier>", methods=["PATCH"])
def update_user_info(identifier: str):
"""
Update the information about a user.
Requires USER_MANAGEMENT.
Args:
identifier (str): The uuid of the user to modify.
Returns:
flask.Response: Response code.
"""
perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
if perm_status != 200:
flask.abort(status=perm_status)
user_data = utils.req_get_entry("users", identifier)
if not user_data:
flask.abort(status=404)
jsondata = flask.request.json
if not jsondata.get("user") or not isinstance(jsondata["user"], dict):
flask.abort(status=400)
indata = jsondata["user"]
validation = utils.basic_check_indata(
indata, user_data, ("_id", "api_key", "api_salt", "auth_ids")
)
if not validation.result:
flask.abort(status=validation.status)
if "email" in indata:
old_user = flask.g.db["users"].find_one({"email": indata["email"]})
if old_user and old_user.get("_id") != user_data["_id"]:
flask.current_app.logger.debug("User already exists")
flask.abort(status=409)
# Avoid "updating" and making log if there are no changes
is_different = False
for field in indata:
if indata[field] != user_data[field]:
is_different = True
break
user_data.update(indata)
if is_different:
result = utils.req_commit_to_db("users", "edit", user_data)
if not result.log or not result.data:
flask.abort(status=500)
return flask.Response(status=200)
@blueprint.route("/<identifier>/log", methods=["GET"])
@login_required
def get_user_log(identifier: str):
"""
Get change logs for the user entry with uuid ``identifier``.
Can be accessed by actual user and admin (USER_MANAGEMENT).
Args:
identifier (str): The user identifier.
Returns:
flask.Response: Information about the user as json.
"""
if identifier != (flask.g.current_user["_id"] or None):
perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
if perm_status != 200:
flask.abort(status=perm_status)
user_logs = list(flask.g.db["logs"].find({"data_type": "user", "data._id": identifier}))
for log in user_logs:
del log["data_type"]
utils.incremental_logs(user_logs)
for i in range(len(user_logs)):
for key in ("api_key", "api_salt"):
if key in user_logs[i]["data"]:
user_logs[i]["data"][key] = "<hidden>"
return utils.response_json({"entry_id": identifier, "data_type": "user", "logs": user_logs})
@blueprint.route("/<identifier>/actions", methods=["GET"])
@login_required
def get_user_actions(identifier: str):
"""
Get a list of actions (changes) by the user entry with ``identifier``.
Can be accessed by actual user and USER_MANAGEMENT.
Args:
identifier (str): The user identifier.
Returns:
flask.Response: Information about the user as json.
"""
if identifier != (flask.g.current_user["_id"] or None):
perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
if perm_status != 200:
flask.abort(status=perm_status)
# only report a list of actions, not the actual data
user_logs = list(flask.g.db["logs"].find({"user": identifier}, {"user": 0}))
for entry in user_logs:
entry["entry_id"] = entry["data"]["_id"]
del entry["data"]
return utils.response_json({"logs": user_logs})
# helper functions
def add_new_user(user_info: dict):
    """
    Add a new user to the database from first oidc login.

    First check if user with the same email exists.
    If so, add the auth_id to the user.

    Args:
        user_info (dict): Information about the user
    """
    db_user = flask.g.db["users"].find_one({"email": user_info["email"]})
    if db_user:
        # Known email: attach the new OIDC auth id to the existing account.
        db_user["auth_ids"].append(user_info["auth_id"])
        result = flask.g.db["users"].update_one(
            {"email": user_info["email"]}, {"$set": {"auth_ids": db_user["auth_ids"]}}
        )
        if not result.acknowledged:
            flask.current_app.logger.error(
                "Failed to add new auth_id to user with email %s", user_info["email"]
            )
            # NOTE(review): this Response is constructed but never returned
            # or raised, so the failure is effectively swallowed — confirm
            # whether flask.abort(500) was intended here.
            flask.Response(status=500)
        else:
            utils.make_log("user", "edit", "Add OIDC entry to auth_ids", db_user, no_user=True)
    else:
        # Unknown email: create a fresh user entry for this OIDC identity.
        new_user = structure.user()
        new_user["email"] = user_info["email"]
        new_user["name"] = user_info["name"]
        new_user["auth_ids"] = [user_info["auth_id"]]
        result = flask.g.db["users"].insert_one(new_user)
        if not result.acknowledged:
            flask.current_app.logger.error(
                "Failed to add user with email %s via oidc", user_info["email"]
            )
            # NOTE(review): same dropped Response pattern as above.
            flask.Response(status=500)
        else:
            utils.make_log("user", "add", "Creating new user from OAuth", new_user, no_user=True)
def do_login(auth_id: str):
    """
    Set all relevant session variables for a logged in user.

    Args:
        auth_id (str): Authentication id for the user.

    Returns:
        bool: Whether the login succeeded.
    """
    matched_user = flask.g.db["users"].find_one({"auth_ids": auth_id})
    if not matched_user:
        return False
    flask.session["user_id"] = matched_user["_id"]
    flask.session.permanent = True  # pylint: disable=assigning-non-slot
    return True
def get_current_user():
    """
    Get the user for the active session.

    Returns:
        dict: The current user, or None when nobody is logged in.
    """
    session_uuid = flask.session.get("user_id")
    return get_user(user_uuid=session_uuid)
def get_user(user_uuid=None):
    """
    Get information about the user.

    Args:
        user_uuid (str): The identifier (uuid) of the user.

    Returns:
        dict: The matching user entry, or None.
    """
    if not user_uuid:
        return None
    return flask.g.db["users"].find_one({"_id": user_uuid}) or None
import flask
import structure
import utils
blueprint = flask.Blueprint("user", __name__) # pylint: disable=invalid-name
PERMISSIONS = {
"DATA_EDIT": ("DATA_EDIT", "USER_ADD", "USER_SEARCH"),
"OWNERS_READ": ("OWNERS_READ",),
"USER_ADD": ("USER_ADD",),
"USER_SEARCH": ("USER_SEARCH",),
"USER_MANAGEMENT": ("USER_MANAGEMENT", "USER_ADD", "USER_SEARCH"),
"DATA_MANAGEMENT": ("DATA_EDIT", "OWNERS_READ", "DATA_MANAGEMENT"),
}
# Decorators
def login_required(func):
"""
Confirm that the user is logged in.
Otherwise abort with status 401 Unauthorized.
"""
@functools.wraps(func)
def wrap(*args, **kwargs):
if not flask.g.current_user:
flask.abort(status=401)
return func(*args, **kwargs)
return wrap
# requests
@blueprint.route("/permissions")
def get_permission_info():
"""Get a list of all permission types."""
return utils.response_json({"permissions": list(PERMISSIONS.keys())})
@blueprint.route("")
def list_users():
    """List all users."""
    perm_status = utils.req_check_permissions(["USER_SEARCH"])
    if perm_status != 200:
        flask.abort(status=perm_status)
    # Secrets are always excluded from the projection.
    projection = {"api_key": 0, "api_salt": 0}
    if not utils.req_has_permission("USER_MANAGEMENT"):
        # Non-admins also do not get auth ids or permissions.
        projection.update({"auth_ids": 0, "permissions": 0})
    users = tuple(flask.g.db["users"].find(projection=projection))
    return utils.response_json({"users": users})
# requests
@blueprint.route("/me")
def get_current_user_info():
    """
    List basic information about the current user.

    Returns:
        flask.Response: json structure for the user
    """
    current = flask.g.current_user
    # Defaults returned for anonymous requests.
    outstructure = {
        "_id": "",
        "affiliation": "",
        "auth_ids": [],
        "email": "",
        "contact": "",
        "name": "",
        "orcid": "",
        "permissions": [],
        "url": "",
    }
    if current:
        outstructure = {key: current[key] for key in outstructure}
    outstructure["permissions"] = utils.prepare_permissions(outstructure["permissions"])
    return utils.response_json({"user": outstructure})
# requests
@blueprint.route("/<identifier>/apikey", methods=["POST"])
@login_required
def gen_new_api_key(identifier: str = None):
    """
    Generate a new API key for the provided or current user.

    Args:
        identifier (str): The user identifier.

    Returns:
        flask.Response: The new API key as json.
    """
    if identifier != flask.g.current_user["_id"]:
        # Rotating someone else's key requires admin rights.
        perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
        if perm_status != 200:
            flask.abort(status=perm_status)
    user_data = utils.req_get_entry("users", identifier)
    if not user_data:
        flask.abort(status=404)
    apikey = utils.gen_api_key()
    new_hash = utils.gen_api_key_hash(apikey.key, apikey.salt)
    new_values = {"api_key": new_hash, "api_salt": apikey.salt}
    user_data.update(new_values)
    result = flask.g.db["users"].update_one({"_id": identifier}, {"$set": new_values})
    if not result.acknowledged:
        flask.current_app.logger.error("Updating API key for user %s failed", identifier)
        # Bug fix: the original built flask.Response(status=500) without
        # returning it and then handed out a key that was never stored.
        flask.abort(status=500)
    utils.make_log("user", "edit", "New API key", user_data)
    return utils.response_json({"key": apikey.key})
@blueprint.route("/<identifier>", methods=["GET"])
def get_user_data(identifier: str):
    """
    Get information about a user.

    Args:
        identifier (str): The user identifier.

    Returns:
        flask.Response: Information about the user as json.
    """
    perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
    if perm_status != 200:
        flask.abort(status=perm_status)
    user_info = utils.req_get_entry("users", identifier)
    if not user_info:
        flask.abort(status=404)
    # The hash and salt should never leave the system
    for secret_field in ("api_key", "api_salt"):
        del user_info[secret_field]
    user_info["permissions"] = utils.prepare_permissions(user_info["permissions"])
    return utils.response_json({"user": user_info})
@blueprint.route("", methods=["POST"])
def add_user():
    """
    Add a user.

    Returns:
        flask.Response: The new entry id as json.
    """
    perm_status = utils.req_check_permissions(["USER_ADD"])
    if perm_status != 200:
        flask.abort(status=perm_status)
    new_user = structure.user()
    jsondata = flask.request.json
    if not jsondata.get("user") or not isinstance(jsondata["user"], dict):
        flask.abort(status=400)
    indata = jsondata["user"]
    validation = utils.basic_check_indata(
        indata, new_user, ("_id", "api_key", "api_salt", "auth_ids")
    )
    if not validation.result:
        flask.abort(status=validation.status)
    indata = utils.prepare_for_db(indata)
    if not indata:
        flask.abort(status=400)
    if "email" not in indata:
        flask.current_app.logger.debug("Email must be set")
        flask.abort(status=400)
    # NOTE(review): update_user_info answers 409 for a duplicate email;
    # 400 is kept here to preserve the existing API behaviour.
    old_user = flask.g.db["users"].find_one({"email": indata["email"]})
    if old_user:
        flask.current_app.logger.debug("User already exists")
        flask.abort(status=400)
    if not utils.req_has_permission("USER_MANAGEMENT") and "permissions" in indata:
        flask.current_app.logger.debug("USER_MANAGEMENT required for permissions")
        # Consistency fix: keyword form of abort as used everywhere else here.
        flask.abort(status=403)
    new_user.update(indata)
    # The fresh account can initially authenticate via its email address.
    new_user["auth_ids"] = [new_user["email"]]
    result = utils.req_commit_to_db("users", "add", new_user)
    if not result.log or not result.data:
        flask.abort(status=500)
    return utils.response_json({"_id": result.ins_id})
@blueprint.route("/<identifier>", methods=["DELETE"])
def delete_user(identifier: str):
    """
    Delete a user.

    Args:
        identifier (str): The user identifier.

    Returns:
        flask.Response: Response code.
    """
    perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
    if perm_status != 200:
        flask.abort(status=perm_status)
    if not utils.req_get_entry("users", identifier):
        flask.abort(status=404)
    result = utils.req_commit_to_db("users", "delete", {"_id": identifier})
    if not result.log or not result.data:
        flask.abort(status=500)
    return flask.Response(status=200)
@blueprint.route("/me", methods=["PATCH"])
@login_required
def update_current_user_info():
    """
    Update the information about the current user.

    Returns:
        flask.Response: Response code.
    """
    user_data = flask.g.current_user
    jsondata = flask.request.json
    if not jsondata.get("user") or not isinstance(jsondata["user"], dict):
        flask.abort(status=400)
    indata = jsondata["user"]
    validation = utils.basic_check_indata(
        indata,
        user_data,
        ("_id", "api_key", "api_salt", "auth_ids", "email", "permissions"),
    )
    if not validation.result:
        flask.abort(status=validation.status)
    # Only commit (and write a log entry) when something actually changed.
    is_different = any(indata[field] != user_data[field] for field in indata)
    user_data.update(indata)
    if is_different:
        result = utils.req_commit_to_db("users", "edit", user_data)
        if not result.log or not result.data:
            flask.abort(status=500)
    return flask.Response(status=200)
@blueprint.route("/<identifier>", methods=["PATCH"])
def update_user_info(identifier: str):
    """
    Update the information about a user.

    Requires USER_MANAGEMENT.

    Args:
        identifier (str): The uuid of the user to modify.

    Returns:
        flask.Response: Response code.
    """
    perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
    if perm_status != 200:
        flask.abort(status=perm_status)
    user_data = utils.req_get_entry("users", identifier)
    if not user_data:
        flask.abort(status=404)
    jsondata = flask.request.json
    if not jsondata.get("user") or not isinstance(jsondata["user"], dict):
        flask.abort(status=400)
    indata = jsondata["user"]
    validation = utils.basic_check_indata(
        indata, user_data, ("_id", "api_key", "api_salt", "auth_ids")
    )
    if not validation.result:
        flask.abort(status=validation.status)
    if "email" in indata:
        # Reject an email already owned by a different account.
        clash = flask.g.db["users"].find_one({"email": indata["email"]})
        if clash and clash.get("_id") != user_data["_id"]:
            flask.current_app.logger.debug("User already exists")
            flask.abort(status=409)
    # Avoid "updating" and making log if there are no changes
    is_different = any(indata[field] != user_data[field] for field in indata)
    user_data.update(indata)
    if is_different:
        result = utils.req_commit_to_db("users", "edit", user_data)
        if not result.log or not result.data:
            flask.abort(status=500)
    return flask.Response(status=200)
@blueprint.route("/<identifier>/log", methods=["GET"])
@login_required
def get_user_log(identifier: str):
    """
    Get change logs for the user entry with uuid ``identifier``.

    Can be accessed by actual user and admin (USER_MANAGEMENT).

    Args:
        identifier (str): The user identifier.

    Returns:
        flask.Response: Information about the user as json.
    """
    if identifier != (flask.g.current_user["_id"] or None):
        perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
        if perm_status != 200:
            flask.abort(status=perm_status)
    user_logs = list(flask.g.db["logs"].find({"data_type": "user", "data._id": identifier}))
    for log in user_logs:
        del log["data_type"]
    utils.incremental_logs(user_logs)
    # Idiom fix: iterate the entries directly instead of range(len(...)).
    # The hash and salt must never be exposed, not even in old log entries.
    for log in user_logs:
        for key in ("api_key", "api_salt"):
            if key in log["data"]:
                log["data"][key] = "<hidden>"
    return utils.response_json({"entry_id": identifier, "data_type": "user", "logs": user_logs})
@blueprint.route("/<identifier>/actions", methods=["GET"])
@login_required
def get_user_actions(identifier: str):
    """
    Get a list of actions (changes) by the user entry with ``identifier``.

    Can be accessed by actual user and USER_MANAGEMENT.

    Args:
        identifier (str): The user identifier.

    Returns:
        flask.Response: Information about the user as json.
    """
    if identifier != (flask.g.current_user["_id"] or None):
        perm_status = utils.req_check_permissions(["USER_MANAGEMENT"])
        if perm_status != 200:
            flask.abort(status=perm_status)
    # only report a list of actions, not the actual data
    entries = list(flask.g.db["logs"].find({"user": identifier}, {"user": 0}))
    for entry in entries:
        entry["entry_id"] = entry["data"]["_id"]
        del entry["data"]
    return utils.response_json({"logs": entries})
# helper functions
def add_new_user(user_info: dict):
    """
    Add a new user to the database from first oidc login.

    First check if user with the same email exists.
    If so, add the auth_id to the user.

    Args:
        user_info (dict): Information about the user
    """
    db_user = flask.g.db["users"].find_one({"email": user_info["email"]})
    if db_user:
        # Known email: just attach the new OIDC auth id to the account.
        db_user["auth_ids"].append(user_info["auth_id"])
        result = flask.g.db["users"].update_one(
            {"email": user_info["email"]}, {"$set": {"auth_ids": db_user["auth_ids"]}}
        )
        if not result.acknowledged:
            flask.current_app.logger.error(
                "Failed to add new auth_id to user with email %s", user_info["email"]
            )
            # Bug fix: the original constructed flask.Response(status=500)
            # and discarded it; abort so the failure is actually reported.
            flask.abort(status=500)
        utils.make_log("user", "edit", "Add OIDC entry to auth_ids", db_user, no_user=True)
    else:
        new_user = structure.user()
        new_user["email"] = user_info["email"]
        new_user["name"] = user_info["name"]
        new_user["auth_ids"] = [user_info["auth_id"]]
        result = flask.g.db["users"].insert_one(new_user)
        if not result.acknowledged:
            flask.current_app.logger.error(
                "Failed to add user with email %s via oidc", user_info["email"]
            )
            # Bug fix: same discarded-Response defect as above.
            flask.abort(status=500)
        utils.make_log("user", "add", "Creating new user from OAuth", new_user, no_user=True)
def do_login(auth_id: str):
    """
    Set all relevant variables for a logged in user.

    Args:
        auth_id (str): Authentication id for the user.

    Returns bool: Whether the login succeeded.
    """
    account = flask.g.db["users"].find_one({"auth_ids": auth_id})
    if not account:
        return False
    flask.session["user_id"] = account["_id"]
    flask.session.permanent = True  # pylint: disable=assigning-non-slot
    return True
def get_current_user():
    """
    Get the user attached to the active session.

    Returns:
        dict: The current user.
    """
    session_user = flask.session.get("user_id")
    return get_user(user_uuid=session_user)
def get_user(user_uuid=None):
    """
    Get information about the user.

    Args:
        user_uuid (str): The identifier (uuid) of the user.

    Returns:
        dict: The matching user, or None when not found or no uuid given.
    """
    if not user_uuid:
        return None
    # find_one already returns None for a missing entry.
    return flask.g.db["users"].find_one({"_id": user_uuid})
return None | 0.589244 | 0.117572 |
import os
import re
from typing import List
import numpy as np
from PIL import Image
from skimage import io
from tqdm import tqdm, trange
from src.path import OUT_DIR
def binary_to_uint8(array: np.ndarray) -> np.ndarray:
    """Map binary labels to uint8 intensities (0 -> 0, 1 -> 255).

    Args:
        array (np.ndarray): array of binary labels.

    Returns:
        np.ndarray: uint8 array.
    """
    scaled = array * 255
    return scaled.round().astype(np.uint8)
def get_submission_lines(submission_filename: str) -> List[str]:
    """Read a submission csv and return its raw lines.

    Args:
        submission_filename (str): path of the csv submission file.

    Returns:
        List[str]: lines of the submission file.
    """
    with open(submission_filename, 'r') as handle:
        return handle.readlines()
def submission_to_mask(submission_filename: str, image_id: int,
                       mask_filename: str = None,
                       w: int = 16, h: int = 16) -> np.ndarray:
    """Returns a mask from a submission file and its id.

    Args:
        submission_filename (str): submission csv file path.
        image_id (int): image id.
        mask_filename (str, optional): mask file path. Defaults to None.
        w (int, optional): width. Defaults to 16.
        h (int, optional): height. Defaults to 16.

    Returns:
        np.ndarray: mask.
    """
    # Get submission lines
    lines = get_submission_lines(submission_filename)
    # Init image: round 600 up to a multiple of the patch size (608 for 16).
    img_width = int(np.ceil(600 / w) * w)
    img_height = int(np.ceil(600 / h) * h)
    im = np.zeros((img_width, img_height), dtype=np.uint8)
    # Rows for this image start with the zero-padded id prefix, e.g. "001_".
    image_id_str = f'{image_id:03d}_'
    # Fill image; lines[1:] skips the "id,prediction" csv header.
    for line in lines[1:]:
        if image_id_str not in line:
            continue
        tokens = line.split(',')
        id_, prediction = tokens[0], int(tokens[1])
        # id format: "<image>_<i>_<j>" — pixel offsets of the patch.
        tokens = id_.split('_')
        i, j = int(tokens[1]), int(tokens[2])
        # Clamp the patch end so border patches stay inside the image.
        je = min(j + w, img_width)
        ie = min(i + h, img_height)
        adata = np.zeros((w, h)) if prediction == 0 else np.ones((w, h))
        # Paint the whole patch as 0 or 255.
        im[j:je, i:ie] = binary_to_uint8(adata)
    # Save mask
    if mask_filename is not None:
        Image.fromarray(im).save(mask_filename)
    return im
def submission_to_masks(submission_filename: str, nb_masks: int = 50,
                        masks_dirname: str = None,
                        w: int = 16, h: int = 16) -> List[np.ndarray]:
    """Returns the list of masks corresponding to a submission.

    Args:
        submission_filename (str): submission csv file path.
        nb_masks (int, optional): number of masks to create. Defaults to 50.
        masks_dirname (str, optional): directory of masks saved as images.
            Defaults to None (masks are not written to disk).
        w (int, optional): width. Defaults to 16.
        h (int, optional): height. Defaults to 16.

    Returns:
        List[np.ndarray]: list of masks.
    """
    masks = list()
    # Create masks directory
    if masks_dirname is not None and not os.path.exists(masks_dirname):
        os.mkdir(masks_dirname)
    # Create masks; image ids are 1-based.
    for i in trange(nb_masks):
        image_id = i + 1
        if masks_dirname is not None:
            mask_name = f'prediction_{image_id:03d}.png'
            mask_filename = os.path.join(masks_dirname, mask_name)
        else:
            # No directory given: build the mask in memory only.
            mask_filename = None
        mask = submission_to_mask(submission_filename, image_id, mask_filename,
                                  w, h)
        masks.append(mask)
    return masks
def patch_to_label(patch: np.ndarray,
                   foreground_threshold: float = 0.25) -> int:
    """Label a patch as road (1) when its mean intensity exceeds the
    threshold scaled to the uint8 range.

    Args:
        patch (np.ndarray): patch.
        foreground_threshold (float, optional): foreground_threshold.
            Defaults to 0.25.

    Returns:
        int: 0 or 1.
    """
    threshold = foreground_threshold * 255
    return int(np.mean(patch) > threshold)
def mask_to_submission_strings(mask_filename: str, patch_size: int = 16,
                               foreground_threshold: float = 0.25,
                               clean: bool = False):
    """Reads a single mask image and outputs the strings that should go into
    the submission file.

    Args:
        mask_filename (str): mask file path.
        patch_size (int, optional): patch size. Defaults to 16.
        foreground_threshold (float, optional): foreground_threshold.
            Defaults to 0.25.
        clean (bool, optional): clean the patches by a neighbor method.
            Defaults to False.

    Yields:
        str: one "<img>_<x>_<y>,<label>" csv row per patch.
    """
    mask_name = os.path.basename(mask_filename)
    # The image number is the first run of digits in the file name.
    img_number = int(re.search(r"\d+", mask_name).group(0))
    im = io.imread(os.path.join(OUT_DIR, 'submission', mask_filename))
    # Create mask of patch 38x38 (608 / 16 patches per side)
    mask_patch = np.zeros(
        shape=(im.shape[0] // patch_size, im.shape[1] // patch_size),
        dtype=np.uint8,
    )
    for j in range(0, im.shape[1], patch_size):
        for i in range(0, im.shape[0], patch_size):
            patch = im[i:i + patch_size, j:j + patch_size]
            label = patch_to_label(
                patch=patch,
                foreground_threshold=foreground_threshold,
            )
            # NOTE(review): the grid is written [j, i] while the image is
            # sliced [i, j]; together with the j/i ranges mixing shape[0]
            # and shape[1] this assumes a square mask — confirm upstream.
            mask_patch[j // patch_size, i // patch_size] = label
    # Improve patches: if a road patch exists two cells away from another
    # road patch, mark the cell in between as road too (gap filling).
    if clean:
        mask_patch_clean = np.copy(mask_patch)
        for j in range(2, mask_patch.shape[1] - 2):
            for i in range(2, mask_patch.shape[0] - 2):
                label = mask_patch[j, i]
                # If not road: ignore
                if label == 0:
                    continue
                if mask_patch[j - 2, i]:
                    mask_patch_clean[j - 1, i] = 1
                if mask_patch[j, i - 2]:
                    mask_patch_clean[j, i - 1] = 1
                if mask_patch[j + 2, i]:
                    mask_patch_clean[j + 1, i] = 1
                if mask_patch[j, i + 2]:
                    mask_patch_clean[j, i + 1] = 1
        mask_patch = mask_patch_clean
    # Yield patches
    for j in range(mask_patch.shape[1]):
        for i in range(mask_patch.shape[0]):
            label = mask_patch[j, i]
            yield f'{img_number:03d}_{j * patch_size}_{i * patch_size},{label}'
def masks_to_submission(submission_filename: str,
                        masks_filenames: list, patch_size: int = 16,
                        foreground_threshold: float = 0.25,
                        clean: bool = False) -> None:
    """Creates a submission file from masks filenames.

    Args:
        submission_filename (str): submission csv file path.
        masks_filenames (list): list of masks file paths.
        patch_size (int, optional): patch size. Defaults to 16.
        foreground_threshold (float, optional): foreground_threshold.
            Defaults to 0.25.
        clean (bool, optional): clean the patches by a neighbor method.
            Defaults to False.
    """
    with open(submission_filename, 'w') as f:
        # Header expected by the grader.
        f.write('id,prediction\n')
        for fn in tqdm(masks_filenames, desc='Create submission', unit='mask'):
            rows = mask_to_submission_strings(
                fn, patch_size,
                foreground_threshold=foreground_threshold,
                clean=clean)
            # One "id,label" line per patch of the mask.
            f.writelines(f'{s}\n' for s in rows)
import re
from typing import List
import numpy as np
from PIL import Image
from skimage import io
from tqdm import tqdm, trange
from src.path import OUT_DIR
def binary_to_uint8(array: np.ndarray) -> np.ndarray:
    """Convert an array of binary labels into 0/255 uint8 values.

    Args:
        array (np.ndarray): array of binary labels.

    Returns:
        np.ndarray: uint8 array.
    """
    return np.round(array * 255).astype(np.uint8)
def get_submission_lines(submission_filename: str) -> List[str]:
    """Return every line of the given submission file.

    Args:
        submission_filename (str): path of the csv submission file.

    Returns:
        List[str]: lines of the submission file.
    """
    with open(submission_filename, 'r') as submission_file:
        lines = submission_file.readlines()
    return lines
def submission_to_mask(submission_filename: str, image_id: int,
                       mask_filename: str = None,
                       w: int = 16, h: int = 16) -> np.ndarray:
    """Returns a mask from a submission file and its id.

    Args:
        submission_filename (str): submission csv file path.
        image_id (int): image id.
        mask_filename (str, optional): mask file path. Defaults to None.
        w (int, optional): width. Defaults to 16.
        h (int, optional): height. Defaults to 16.

    Returns:
        np.ndarray: mask.
    """
    # Get submission lines
    lines = get_submission_lines(submission_filename)
    # Init image; 600 is rounded up to the next multiple of the patch size.
    img_width = int(np.ceil(600 / w) * w)
    img_height = int(np.ceil(600 / h) * h)
    im = np.zeros((img_width, img_height), dtype=np.uint8)
    # Zero-padded id prefix used to select this image's rows.
    image_id_str = f'{image_id:03d}_'
    # Fill image — skip the csv header line.
    for line in lines[1:]:
        if image_id_str not in line:
            continue
        tokens = line.split(',')
        id_, prediction = tokens[0], int(tokens[1])
        # id is "<image>_<i>_<j>": patch pixel offsets.
        tokens = id_.split('_')
        i, j = int(tokens[1]), int(tokens[2])
        # Clamp so a border patch never writes outside the image.
        je = min(j + w, img_width)
        ie = min(i + h, img_height)
        adata = np.zeros((w, h)) if prediction == 0 else np.ones((w, h))
        im[j:je, i:ie] = binary_to_uint8(adata)
    # Save mask
    if mask_filename is not None:
        Image.fromarray(im).save(mask_filename)
    return im
def submission_to_masks(submission_filename: str, nb_masks: int = 50,
                        masks_dirname: str = None,
                        w: int = 16, h: int = 16) -> List[np.ndarray]:
    """Returns the list of masks corresponding to a submission.

    Args:
        submission_filename (str): submission csv file path.
        nb_masks (int, optional): number of masks to create. Defaults to 50.
        masks_dirname (str, optional): directory of masks saved as images.
            Defaults to None, in which case nothing is written to disk.
        w (int, optional): width. Defaults to 16.
        h (int, optional): height. Defaults to 16.

    Returns:
        List[np.ndarray]: list of masks.
    """
    masks = list()
    # Create masks directory
    if masks_dirname is not None and not os.path.exists(masks_dirname):
        os.mkdir(masks_dirname)
    # Create masks — image ids start at 1.
    for i in trange(nb_masks):
        image_id = i + 1
        if masks_dirname is not None:
            mask_name = f'prediction_{image_id:03d}.png'
            mask_filename = os.path.join(masks_dirname, mask_name)
        else:
            mask_filename = None
        mask = submission_to_mask(submission_filename, image_id, mask_filename,
                                  w, h)
        masks.append(mask)
    return masks
def patch_to_label(patch: np.ndarray,
                   foreground_threshold: float = 0.25) -> int:
    """Assign the binary road label for a single patch.

    Args:
        patch (np.ndarray): patch.
        foreground_threshold (float, optional): foreground_threshold.
            Defaults to 0.25.

    Returns:
        int: 0 or 1.
    """
    mean_intensity = np.mean(patch)
    if mean_intensity > foreground_threshold * 255:
        return 1
    return 0
def mask_to_submission_strings(mask_filename: str, patch_size: int = 16,
                               foreground_threshold: float = 0.25,
                               clean: bool = False):
    """Reads a single mask image and outputs the strings that should go into
    the submission file.

    Args:
        mask_filename (str): mask file path.
        patch_size (int, optional): patch size. Defaults to 16.
        foreground_threshold (float, optional): foreground_threshold.
            Defaults to 0.25.
        clean (bool, optional): clean the patches by a neighbor method.
            Defaults to False.

    Yields:
        str: one csv row per patch.
    """
    mask_name = os.path.basename(mask_filename)
    # Image number = first digit run in the name (e.g. "prediction_003.png").
    img_number = int(re.search(r"\d+", mask_name).group(0))
    im = io.imread(os.path.join(OUT_DIR, 'submission', mask_filename))
    # Create mask of patch 38x38 (one cell per patch_size x patch_size block)
    mask_patch = np.zeros(
        shape=(im.shape[0] // patch_size, im.shape[1] // patch_size),
        dtype=np.uint8,
    )
    for j in range(0, im.shape[1], patch_size):
        for i in range(0, im.shape[0], patch_size):
            patch = im[i:i + patch_size, j:j + patch_size]
            label = patch_to_label(
                patch=patch,
                foreground_threshold=foreground_threshold,
            )
            # NOTE(review): grid written [j, i], image sliced [i, j]; this
            # looks like it relies on the mask being square — confirm.
            mask_patch[j // patch_size, i // patch_size] = label
    # Improve patches: fill a one-cell gap between two road cells two apart.
    if clean:
        mask_patch_clean = np.copy(mask_patch)
        for j in range(2, mask_patch.shape[1] - 2):
            for i in range(2, mask_patch.shape[0] - 2):
                label = mask_patch[j, i]
                # If not road: ignore
                if label == 0:
                    continue
                if mask_patch[j - 2, i]:
                    mask_patch_clean[j - 1, i] = 1
                if mask_patch[j, i - 2]:
                    mask_patch_clean[j, i - 1] = 1
                if mask_patch[j + 2, i]:
                    mask_patch_clean[j + 1, i] = 1
                if mask_patch[j, i + 2]:
                    mask_patch_clean[j, i + 1] = 1
        mask_patch = mask_patch_clean
    # Yield patches
    for j in range(mask_patch.shape[1]):
        for i in range(mask_patch.shape[0]):
            label = mask_patch[j, i]
            yield f'{img_number:03d}_{j * patch_size}_{i * patch_size},{label}'
def masks_to_submission(submission_filename: str,
                        masks_filenames: list, patch_size: int = 16,
                        foreground_threshold: float = 0.25,
                        clean: bool = False) -> None:
    """Write a csv submission built from the given mask files.

    Args:
        submission_filename (str): submission csv file path.
        masks_filenames (list): list of masks file paths.
        patch_size (int, optional): patch size. Defaults to 16.
        foreground_threshold (float, optional): foreground_threshold.
            Defaults to 0.25.
        clean (bool, optional): clean the patches by a neighbor method.
            Defaults to False.
    """
    with open(submission_filename, 'w') as out:
        out.write('id,prediction\n')
        for mask_fn in tqdm(masks_filenames, desc='Create submission', unit='mask'):
            patch_rows = mask_to_submission_strings(
                mask_fn, patch_size,
                foreground_threshold=foreground_threshold,
                clean=clean)
            out.writelines(f'{row}\n' for row in patch_rows)
__all__ = ('KeepType',)
from .docs import has_docs
@has_docs
class KeepType:
    """
    Class decorator that extends an already existing class with the
    attributes of a newly defined one.

    Every attribute found on the new class is copied onto the old class,
    except the ignored special names below, a `None` ``__doc__``, and
    attributes inherited unchanged from ``object``.

    Attributes
    ----------
    old_class : `type`
        The old class to extend.

    Class Attributes
    ----------------
    _ignored_attr_names : `set` of `str`
        Attribute names to ignore when extending.
    """
    __slots__ = ('old_class',)
    _ignored_attr_names = frozenset((
        '__name__', '__qualname__', '__weakref__', '__dict__', '__slots__', '__module__',
    ))
    
    @has_docs
    def __new__(cls, old_class, *, new_class=None):
        """
        Creates a ``KeepType`` wrapping `old_class`. When `new_class` is also
        given, the extension happens immediately and the (extended) old class
        is returned instead, so the type can be used both as ``KeepType(Old)``
        decorator and as a direct ``KeepType(Old, new_class=New)`` call.
        
        Parameters
        ----------
        old_class : `type`
            The old class to extend.
        new_class : `None`, `type` = `None`, Optional (Keyword only)
            The new class to extend the old class's functionality with.
        
        Returns
        -------
        obj : ``KeepType``, `type`
        """
        self = object.__new__(cls)
        self.old_class = old_class
        return self if new_class is None else self(new_class)
    
    @has_docs
    def __call__(self, new_class):
        """
        Extends ``.old_class`` with the attributes of `new_class`.
        
        Parameters
        ----------
        new_class : `type`
            The new class to extend the old class's functionality with.
        
        Returns
        -------
        old_class : `type`
            The extended old class.
        """
        target = self.old_class
        skipped = self._ignored_attr_names
        for name in dir(new_class):
            if name in skipped:
                continue
            value = getattr(new_class, name)
            # A missing docstring on the new class must not erase the old one.
            if (name == '__doc__') and (value is None):
                continue
            # Skip attributes inherited unchanged from ``object``.
            if hasattr(object, name) and (value is getattr(object, name)):
                continue
            setattr(target, name, value)
        return target
from .docs import has_docs
@has_docs
class KeepType:
    """
    Decorator type used to graft the attributes of a freshly defined class
    onto an already existing one, extending its functionality in place.

    Attributes copied: everything ``dir(new_class)`` reports except the
    ignored special names, a `None` ``__doc__``, and members inherited
    unchanged from ``object``.

    Attributes
    ----------
    old_class : `type`
        The old class to extend.

    Class Attributes
    ----------------
    _ignored_attr_names : `set` of `str`
        Attribute names to ignore when extending.
    """
    __slots__ = ('old_class',)
    _ignored_attr_names = frozenset((
        '__name__',
        '__qualname__',
        '__weakref__',
        '__dict__',
        '__slots__',
        '__module__',
    ))
    
    @has_docs
    def __new__(cls, old_class, *, new_class=None):
        """
        Builds the decorator around `old_class`; if `new_class` is supplied
        as well, applies it at once and returns the extended old class.
        
        Parameters
        ----------
        old_class : `type`
            The old class to extend.
        new_class : `None`, `type` = `None`, Optional (Keyword only)
            The new class to extend the old class's functionality with.
        
        Returns
        -------
        obj : ``KeepType``, `type`
        """
        self = object.__new__(cls)
        self.old_class = old_class
        if new_class is None:
            return self
        return self(new_class)
    
    @has_docs
    def __call__(self, new_class):
        """
        Copies the attributes of `new_class` onto ``.old_class``.
        
        Parameters
        ----------
        new_class : `type`
            The new class to extend the old class's functionality with.
        
        Returns
        -------
        old_class : `type`
            The extended old class.
        """
        extended = self.old_class
        for attribute_name in dir(new_class):
            if attribute_name in self._ignored_attr_names:
                continue
            attribute_value = getattr(new_class, attribute_name)
            # Keep the old docstring when the new class has none.
            if (attribute_name == '__doc__') and (attribute_value is None):
                continue
            # Do not copy members inherited unchanged from ``object``.
            if hasattr(object, attribute_name) and (attribute_value is getattr(object, attribute_name)):
                continue
            setattr(extended, attribute_name, attribute_value)
        return extended
from Tkinter import *
import os
import subprocess
import tkFont
import tkMessageBox
import ScrolledText
import time
def portlist():
    """Return the output of ``sudo ufw status`` as text.

    Bug fix: the original wrote ``portlist.txt`` into the current working
    directory but read back the hardcoded ``/home/ubuntu1/portlist.txt``;
    capturing the command output directly removes the temp file and the
    path mismatch.
    """
    return subprocess.check_output(["sudo", "ufw", "status"])
class MainWindow:
    """Tk window exposing ufw allow/deny controls and the port list."""
    def __init__(self):
        self.mw = Tk()
        self.update_btn = Button(self.mw)
        self.allow_btn = Button(self.mw)
        self.deny_btn = Button(self.mw)
        self.textarea = ScrolledText.ScrolledText(self.mw)
        self.allow_tf = Entry(self.mw)
        self.deny_tf = Entry(self.mw)
    def init(self):
        """Lay out the widgets and enter the Tk main loop."""
        self.mw.title("Firewall Linux")
        self.mw.geometry("500x420")
        self.update_btn.place(x=300, y=10)
        self.update_btn.config(
            text="Actualizar lista de puertos",
            command=self.__on_update
        )
        self.textarea.config(
            width=60,
            height=20,
            state=DISABLED
        )
        self.textarea.place(x=10, y=60)
        # Bug fix: assigning ``.width`` on an Entry only sets a plain Python
        # attribute; config() actually applies the intended widget width.
        self.allow_tf.config(width=10)
        self.allow_tf.place(x=10, y=10)
        self.allow_btn.config(
            text="Allow",
            command=self.__on_allow
        )
        self.allow_btn.place(
            x=200,
            y=10
        )
        self.deny_tf.config(width=5)
        self.deny_tf.place(x=10, y=360)
        self.deny_btn.config(
            text="Deny",
            command=self.__on_deny
        )
        self.deny_btn.place(
            x=200,
            y=360
        )
        self.mw.mainloop()
    def __on_update(self):
        """Refresh the text area with the current ufw status."""
        value = portlist()
        self.textarea.config(state=NORMAL)
        self.textarea.delete("1.0", "end")
        self.textarea.insert(INSERT, value)
        self.textarea.config(state=DISABLED)
    def __on_allow(self):
        """Allow the port typed into the entry field."""
        port_value = self.allow_tf.get()
        # Security fix: pass arguments as a list instead of interpolating
        # user input into a shell string (command injection risk).
        subprocess.call(["sudo", "ufw", "allow", port_value])
    def __on_deny(self):
        """Deny the port typed into the entry field."""
        port_value = self.deny_tf.get()
        # Security fix: same parameterized call as __on_allow.
        subprocess.call(["sudo", "ufw", "deny", port_value])
mw = MainWindow()
mw.init() | exam/exam.py |
from Tkinter import *
import os
import subprocess
import tkFont
import tkMessageBox
import ScrolledText
import time
def portlist():
    """Return the current ufw firewall status as text.

    Bug fix: the original redirected ``sudo ufw status`` into a relative
    ``portlist.txt`` yet read the absolute ``/home/ubuntu1/portlist.txt``;
    capturing stdout directly avoids both the mismatch and the temp file.
    """
    return subprocess.check_output(["sudo", "ufw", "status"])
class MainWindow:
    """Main window: ufw allow/deny controls plus the current port list."""
    def __init__(self):
        self.mw = Tk()
        self.update_btn = Button(self.mw)
        self.allow_btn = Button(self.mw)
        self.deny_btn = Button(self.mw)
        self.textarea = ScrolledText.ScrolledText(self.mw)
        self.allow_tf = Entry(self.mw)
        self.deny_tf = Entry(self.mw)
    def init(self):
        """Configure and place the widgets, then run the main loop."""
        self.mw.title("Firewall Linux")
        self.mw.geometry("500x420")
        self.update_btn.place(x=300, y=10)
        self.update_btn.config(
            text="Actualizar lista de puertos",
            command=self.__on_update
        )
        self.textarea.config(
            width=60,
            height=20,
            state=DISABLED
        )
        self.textarea.place(x=10, y=60)
        # Bug fix: ``entry.width = N`` has no effect on the widget;
        # the width option must go through config().
        self.allow_tf.config(width=10)
        self.allow_tf.place(x=10, y=10)
        self.allow_btn.config(
            text="Allow",
            command=self.__on_allow
        )
        self.allow_btn.place(
            x=200,
            y=10
        )
        self.deny_tf.config(width=5)
        self.deny_tf.place(x=10, y=360)
        self.deny_btn.config(
            text="Deny",
            command=self.__on_deny
        )
        self.deny_btn.place(
            x=200,
            y=360
        )
        self.mw.mainloop()
    def __on_update(self):
        """Reload the ufw status into the (read-only) text area."""
        value = portlist()
        self.textarea.config(state=NORMAL)
        self.textarea.delete("1.0", "end")
        self.textarea.insert(INSERT, value)
        self.textarea.config(state=DISABLED)
    def __on_allow(self):
        """Allow the port entered by the user."""
        port_value = self.allow_tf.get()
        # Security fix: parameterized call instead of building a shell
        # string from user input (command injection risk).
        subprocess.call(["sudo", "ufw", "allow", port_value])
    def __on_deny(self):
        """Deny the port entered by the user."""
        port_value = self.deny_tf.get()
        subprocess.call(["sudo", "ufw", "deny", port_value])
mw = MainWindow()
mw.init() | 0.223038 | 0.053626 |
import torch
import torch.nn as nn
import numpy as np
def gen_non_linearity(A, non_linearity):
    '''
    Returns required activation for a tensor based on the inputs

    non_linearity is either a callable or a value in
    ['tanh', 'sigmoid', 'relu', 'quantTanh', 'quantSigm', 'quantSigm4']
    '''
    if non_linearity == "tanh":
        return torch.tanh(A)
    elif non_linearity == "sigmoid":
        return torch.sigmoid(A)
    elif non_linearity == "relu":
        # Bug fix: torch.relu takes a single tensor argument; the original
        # torch.relu(A, 0.0) raised a TypeError.
        return torch.relu(A)
    elif non_linearity == "quantTanh":
        # Equivalent to clipping A into [-1, 1].
        return torch.max(torch.min(A, torch.ones_like(A)), -1.0 * torch.ones_like(A))
    elif non_linearity == "quantSigm":
        # Piecewise-linear sigmoid approximation clipped into [0, 1].
        A = (A + 1.0) / 2.0
        return torch.max(torch.min(A, torch.ones_like(A)), torch.zeros_like(A))
    elif non_linearity == "quantSigm4":
        # Wider-ramp variant of quantSigm.
        A = (A + 2.0) / 4.0
        return torch.max(torch.min(A, torch.ones_like(A)), torch.zeros_like(A))
    else:
        # non_linearity is a user specified function
        if not callable(non_linearity):
            # Bug fix: the original concatenation used a stray unary '+'
            # on a str ('... ' + + '[...'), raising TypeError instead of
            # the intended ValueError.
            raise ValueError("non_linearity is either a callable or a value "
                             "in ['tanh', 'sigmoid', 'relu', 'quantTanh', "
                             "'quantSigm', 'quantSigm4']")
        return non_linearity(A)
class BaseRNN(nn.Module):
    '''
    Generic equivalent of static_rnn in tf
    Used to unroll all the cell written in this file
    We assume data to be batch_first by default ie.,
    [batchSize, timeSteps, inputDims] else
    [timeSteps, batchSize, inputDims]
    '''

    def __init__(self, RNNCell, batch_first=True):
        # RNNCell must expose ``output_size`` and ``cellType``; a cellType of
        # "LSTMLR" means the cell also threads a cell state through time —
        # TODO(review): confirm against the cell classes in this file.
        super(BaseRNN, self).__init__()
        self.RNNCell = RNNCell
        self.batch_first = batch_first

    def forward(self, input, hiddenState=None,
                cellState=None):
        # Unrolls self.RNNCell over the time axis of ``input`` and returns
        # the per-step hidden states (plus cell states for "LSTMLR" cells).
        # Missing initial states default to zeros on ``input``'s device.
        if self.batch_first is True:
            # input: [batchSize, timeSteps, inputDims]
            self.device = input.device
            hiddenStates = torch.zeros(
                [input.shape[0], input.shape[1],
                 self.RNNCell.output_size]).to(self.device)
            if hiddenState is None:
                hiddenState = torch.zeros([input.shape[0],
                                           self.RNNCell.output_size]).to(self.device)
            if self.RNNCell.cellType == "LSTMLR":
                # LSTM-like cell: track a cell state alongside the hidden state.
                cellStates = torch.zeros(
                    [input.shape[0], input.shape[1],
                     self.RNNCell.output_size]).to(self.device)
                if cellState is None:
                    cellState = torch.zeros(
                        [input.shape[0], self.RNNCell.output_size]).to(self.device)
                # Step over the time dimension (axis 1).
                for i in range(0, input.shape[1]):
                    hiddenState, cellState = self.RNNCell(
                        input[:, i, :], (hiddenState, cellState))
                    hiddenStates[:, i, :] = hiddenState
                    cellStates[:, i, :] = cellState
                return hiddenStates, cellStates
            else:
                for i in range(0, input.shape[1]):
                    hiddenState = self.RNNCell(input[:, i, :], hiddenState)
                    hiddenStates[:, i, :] = hiddenState
                return hiddenStates
        else:
            # input: [timeSteps, batchSize, inputDims]
            self.device = input.device
            hiddenStates = torch.zeros(
                [input.shape[0], input.shape[1],
                 self.RNNCell.output_size]).to(self.device)
            if hiddenState is None:
                # Batch size is shape[1] in time-major layout.
                hiddenState = torch.zeros([input.shape[1],
                                           self.RNNCell.output_size]).to(self.device)
            if self.RNNCell.cellType == "LSTMLR":
                cellStates = torch.zeros(
                    [input.shape[0], input.shape[1],
                     self.RNNCell.output_size]).to(self.device)
                if cellState is None:
                    cellState = torch.zeros(
                        [input.shape[1], self.RNNCell.output_size]).to(self.device)
                # Step over the time dimension (axis 0).
                for i in range(0, input.shape[0]):
                    hiddenState, cellState = self.RNNCell(
                        input[i, :, :], (hiddenState, cellState))
                    hiddenStates[i, :, :] = hiddenState
                    cellStates[i, :, :] = cellState
                return hiddenStates, cellStates
            else:
                for i in range(0, input.shape[0]):
                    hiddenState = self.RNNCell(input[i, :, :], hiddenState)
                    hiddenStates[i, :, :] = hiddenState
                return hiddenStates
class FastGRNNCell(nn.Module):
    '''
    FastGRNN Cell with Both Full Rank and Low Rank Formulations
    Has multiple activation functions for the gates
    hidden_size = # hidden units
    gate_non_linearity = nonlinearity for the gate can be chosen from
    [tanh, sigmoid, relu, quantTanh, quantSigm]
    update_non_linearity = nonlinearity for final rnn update
    can be chosen from [tanh, sigmoid, relu, quantTanh, quantSigm]
    wRank = rank of W matrix (creates two matrices if not None)
    uRank = rank of U matrix (creates two matrices if not None)
    zetaInit = init for zeta, the scale param
    nuInit = init for nu, the translation param
    FastGRNN architecture and compression techniques are found in
    FastGRNN(LINK) paper
    Basic architecture is like:
    z_t = gate_nl(Wx_t + Uh_{t-1} + B_g)
    h_t^ = update_nl(Wx_t + Uh_{t-1} + B_h)
    h_t = z_t*h_{t-1} + (sigmoid(zeta)(1-z_t) + sigmoid(nu))*h_t^
    W and U can further parameterised into low rank version by
    W = matmul(W_1, W_2) and U = matmul(U_1, U_2)
    '''
    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 zetaInit=1.0, nuInit=-4.0, name="FastGRNN"):
        super(FastGRNNCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        # [#W matrices, #U matrices]; each count becomes 2 when that matrix
        # is low-rank factored (see getVars).
        self._num_weight_matrices = [1, 1]
        self._wRank = wRank
        self._uRank = uRank
        self._zetaInit = zetaInit
        self._nuInit = nuInit
        if wRank is not None:
            self._num_weight_matrices[0] += 1
        if uRank is not None:
            self._num_weight_matrices[1] += 1
        self._name = name
        if wRank is None:
            # Full-rank input-to-hidden matrix.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, hidden_size]))
        else:
            # Low-rank factorization W = W1 @ W2 (rank = wRank).
            self.W1 = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            # Full-rank hidden-to-hidden matrix.
            self.U = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            # Low-rank factorization U = U1 @ U2 (rank = uRank).
            self.U1 = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_gate = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        # Learned scalar gate parameters; squashed through sigmoid in
        # forward() so their effective values stay in (0, 1).
        self.zeta = nn.Parameter(self._zetaInit * torch.ones([1, 1]))
        self.nu = nn.Parameter(self._nuInit * torch.ones([1, 1]))
    @property
    def state_size(self):
        return self._hidden_size
    @property
    def input_size(self):
        return self._input_size
    @property
    def output_size(self):
        return self._hidden_size
    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity
    @property
    def update_non_linearity(self):
        return self._update_non_linearity
    @property
    def wRank(self):
        return self._wRank
    @property
    def uRank(self):
        return self._uRank
    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices
    @property
    def name(self):
        return self._name
    @property
    def cellType(self):
        return "FastGRNN"
    def forward(self, input, state):
        # Wx_t, either directly or through the low-rank factors.
        if self._wRank is None:
            wComp = torch.matmul(input, self.W)
        else:
            wComp = torch.matmul(
                torch.matmul(input, self.W1), self.W2)
        # Uh_{t-1}, either directly or through the low-rank factors.
        if self._uRank is None:
            uComp = torch.matmul(state, self.U)
        else:
            uComp = torch.matmul(
                torch.matmul(state, self.U1), self.U2)
        # Gate and candidate share the same pre-activation; only the bias
        # and nonlinearity differ.
        pre_comp = wComp + uComp
        z = gen_non_linearity(pre_comp + self.bias_gate,
                              self._gate_non_linearity)
        c = gen_non_linearity(pre_comp + self.bias_update,
                              self._update_non_linearity)
        # h_t = z*h_{t-1} + (sigmoid(zeta)*(1-z) + sigmoid(nu)) * h_t^
        new_h = z * state + (torch.sigmoid(self.zeta) *
                             (1.0 - z) + torch.sigmoid(self.nu)) * c
        return new_h
    def getVars(self):
        # Parameters in a fixed order: W factor(s), U factor(s),
        # gate/update biases, then zeta and nu.
        Vars = []
        if self._num_weight_matrices[0] == 1:
            Vars.append(self.W)
        else:
            Vars.extend([self.W1, self.W2])
        if self._num_weight_matrices[1] == 1:
            Vars.append(self.U)
        else:
            Vars.extend([self.U1, self.U2])
        Vars.extend([self.bias_gate, self.bias_update])
        Vars.extend([self.zeta, self.nu])
        return Vars
class FastRNNCell(nn.Module):
    '''
    FastRNN Cell with Both Full Rank and Low Rank Formulations
    Has multiple activation functions for the gates
    hidden_size = # hidden units
    update_non_linearity = nonlinearity for final rnn update
    can be chosen from [tanh, sigmoid, relu, quantTanh, quantSigm]
    wRank = rank of W matrix (creates two matrices if not None)
    uRank = rank of U matrix (creates two matrices if not None)
    alphaInit = init for alpha, the update scalar
    betaInit = init for beta, the weight for previous state
    FastRNN architecture and compression techniques are found in
    FastGRNN(LINK) paper
    Basic architecture is like:
    h_t^ = update_nl(Wx_t + Uh_{t-1} + B_h)
    h_t = sigmoid(beta)*h_{t-1} + sigmoid(alpha)*h_t^
    W and U can further parameterised into low rank version by
    W = matmul(W_1, W_2) and U = matmul(U_1, U_2)
    '''
    def __init__(self, input_size, hidden_size,
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 alphaInit=-3.0, betaInit=3.0, name="FastRNN"):
        super(FastRNNCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._update_non_linearity = update_non_linearity
        # [#W matrices, #U matrices]; each count becomes 2 when low-rank.
        self._num_weight_matrices = [1, 1]
        self._wRank = wRank
        self._uRank = uRank
        self._alphaInit = alphaInit
        self._betaInit = betaInit
        if wRank is not None:
            self._num_weight_matrices[0] += 1
        if uRank is not None:
            self._num_weight_matrices[1] += 1
        self._name = name
        if wRank is None:
            # Full-rank input-to-hidden matrix.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, hidden_size]))
        else:
            # Low-rank factorization W = W1 @ W2 (rank = wRank).
            self.W1 = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            # Full-rank hidden-to-hidden matrix.
            self.U = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            # Low-rank factorization U = U1 @ U2 (rank = uRank).
            self.U1 = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        # Learned scalar mixing weights; squashed through sigmoid in
        # forward() so their effective values stay in (0, 1).
        self.alpha = nn.Parameter(self._alphaInit * torch.ones([1, 1]))
        self.beta = nn.Parameter(self._betaInit * torch.ones([1, 1]))
    @property
    def state_size(self):
        return self._hidden_size
    @property
    def input_size(self):
        return self._input_size
    @property
    def output_size(self):
        return self._hidden_size
    @property
    def update_non_linearity(self):
        return self._update_non_linearity
    @property
    def wRank(self):
        return self._wRank
    @property
    def uRank(self):
        return self._uRank
    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices
    @property
    def name(self):
        return self._name
    @property
    def cellType(self):
        return "FastRNN"
    def forward(self, input, state):
        # Wx_t, either directly or through the low-rank factors.
        if self._wRank is None:
            wComp = torch.matmul(input, self.W)
        else:
            wComp = torch.matmul(
                torch.matmul(input, self.W1), self.W2)
        # Uh_{t-1}, either directly or through the low-rank factors.
        if self._uRank is None:
            uComp = torch.matmul(state, self.U)
        else:
            uComp = torch.matmul(
                torch.matmul(state, self.U1), self.U2)
        pre_comp = wComp + uComp
        # Candidate state h_t^.
        c = gen_non_linearity(pre_comp + self.bias_update,
                              self._update_non_linearity)
        # Residual-style mix: sigmoid(beta)*h_{t-1} + sigmoid(alpha)*h_t^.
        new_h = torch.sigmoid(self.beta) * state + \
            torch.sigmoid(self.alpha) * c
        return new_h
    def getVars(self):
        # Parameters in a fixed order: W factor(s), U factor(s),
        # update bias, then alpha and beta.
        Vars = []
        if self._num_weight_matrices[0] == 1:
            Vars.append(self.W)
        else:
            Vars.extend([self.W1, self.W2])
        if self._num_weight_matrices[1] == 1:
            Vars.append(self.U)
        else:
            Vars.extend([self.U1, self.U2])
        Vars.extend([self.bias_update])
        Vars.extend([self.alpha, self.beta])
        return Vars
class LSTMLRCell(nn.Module):
    '''
    LR - Low Rank
    LSTM LR Cell with Both Full Rank and Low Rank Formulations
    Has multiple activation functions for the gates
    hidden_size = # hidden units
    gate_non_linearity = nonlinearity for the gate can be chosen from
    [tanh, sigmoid, relu, quantTanh, quantSigm]
    update_non_linearity = nonlinearity for final rnn update
    can be chosen from [tanh, sigmoid, relu, quantTanh, quantSigm]
    wRank = rank of all W matrices
    (creates 5 matrices if not None else creates 4 matrices)
    uRank = rank of all U matrices
    (creates 5 matrices if not None else creates 4 matrices)
    LSTM architecture and compression techniques are found in
    LSTM paper
    Basic architecture is like:
    f_t = gate_nl(W1x_t + U1h_{t-1} + B_f)
    i_t = gate_nl(W2x_t + U2h_{t-1} + B_i)
    C_t^ = update_nl(W3x_t + U3h_{t-1} + B_c)
    o_t = gate_nl(W4x_t + U4h_{t-1} + B_o)
    C_t = f_t*C_{t-1} + i_t*C_t^
    h_t = o_t*update_nl(C_t)
    Wi and Ui can further parameterised into low rank version by
    Wi = matmul(W, W_i) and Ui = matmul(U, U_i)
    '''
    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 name="LSTMLR"):
        super(LSTMLRCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        # [#W matrices, #U matrices]; 4 per side, +1 shared low-rank factor.
        self._num_weight_matrices = [4, 4]
        self._wRank = wRank
        self._uRank = uRank
        if wRank is not None:
            self._num_weight_matrices[0] += 1
        if uRank is not None:
            self._num_weight_matrices[1] += 1
        self._name = name
        if wRank is None:
            # Four independent full-rank input projections (one per gate).
            self.W1 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
            self.W2 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
            self.W3 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
            self.W4 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
        else:
            # Shared low-rank factor W plus per-gate second factors Wi,
            # so gate i uses matmul(W, Wi).
            self.W = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W1 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
            self.W3 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
            self.W4 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            # Four independent full-rank recurrent projections.
            self.U1 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
            self.U2 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
            self.U3 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
            self.U4 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            # Shared low-rank factor U plus per-gate second factors Ui.
            self.U = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U1 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
            self.U3 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
            self.U4 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_f = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_i = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_c = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_o = nn.Parameter(torch.ones([1, hidden_size]))
    @property
    def state_size(self):
        # Hidden state h and cell state c, hence twice the hidden size.
        return 2 * self._hidden_size
    @property
    def input_size(self):
        return self._input_size
    @property
    def output_size(self):
        return self._hidden_size
    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity
    @property
    def update_non_linearity(self):
        return self._update_non_linearity
    @property
    def wRank(self):
        return self._wRank
    @property
    def uRank(self):
        return self._uRank
    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices
    @property
    def name(self):
        return self._name
    @property
    def cellType(self):
        return "LSTMLR"
    def forward(self, input, hiddenStates):
        # hiddenStates is the (h, c) pair; BaseRNN passes it as a tuple.
        (h, c) = hiddenStates
        if self._wRank is None:
            wComp1 = torch.matmul(input, self.W1)
            wComp2 = torch.matmul(input, self.W2)
            wComp3 = torch.matmul(input, self.W3)
            wComp4 = torch.matmul(input, self.W4)
        else:
            wComp1 = torch.matmul(
                torch.matmul(input, self.W), self.W1)
            wComp2 = torch.matmul(
                torch.matmul(input, self.W), self.W2)
            wComp3 = torch.matmul(
                torch.matmul(input, self.W), self.W3)
            wComp4 = torch.matmul(
                torch.matmul(input, self.W), self.W4)
        if self._uRank is None:
            uComp1 = torch.matmul(h, self.U1)
            uComp2 = torch.matmul(h, self.U2)
            uComp3 = torch.matmul(h, self.U3)
            uComp4 = torch.matmul(h, self.U4)
        else:
            uComp1 = torch.matmul(
                torch.matmul(h, self.U), self.U1)
            uComp2 = torch.matmul(
                torch.matmul(h, self.U), self.U2)
            uComp3 = torch.matmul(
                torch.matmul(h, self.U), self.U3)
            uComp4 = torch.matmul(
                torch.matmul(h, self.U), self.U4)
        pre_comp1 = wComp1 + uComp1
        pre_comp2 = wComp2 + uComp2
        pre_comp3 = wComp3 + uComp3
        pre_comp4 = wComp4 + uComp4
        # NOTE(review): gate naming here (i from pre_comp1/bias_i, f from
        # pre_comp2/bias_f) is mirrored relative to the class docstring's
        # W1->f, W2->i ordering; behaviorally symmetric since all gate
        # weights are identically initialized — confirm against the paper.
        i = gen_non_linearity(pre_comp1 + self.bias_i,
                              self._gate_non_linearity)
        f = gen_non_linearity(pre_comp2 + self.bias_f,
                              self._gate_non_linearity)
        o = gen_non_linearity(pre_comp4 + self.bias_o,
                              self._gate_non_linearity)
        # Candidate cell state C_t^.
        c_ = gen_non_linearity(pre_comp3 + self.bias_c,
                               self._update_non_linearity)
        # C_t = f*C_{t-1} + i*C_t^ ; h_t = o * update_nl(C_t)
        new_c = f * c + i * c_
        new_h = o * gen_non_linearity(new_c, self._update_non_linearity)
        return new_h, new_c
    def getVars(self):
        # Parameters in a fixed order: W matrices (shared factor first if
        # low-rank), U matrices, then the four gate biases.
        Vars = []
        if self._num_weight_matrices[0] == 4:
            Vars.extend([self.W1, self.W2, self.W3, self.W4])
        else:
            Vars.extend([self.W, self.W1, self.W2, self.W3, self.W4])
        if self._num_weight_matrices[1] == 4:
            Vars.extend([self.U1, self.U2, self.U3, self.U4])
        else:
            Vars.extend([self.U, self.U1, self.U2, self.U3, self.U4])
        Vars.extend([self.bias_f, self.bias_i, self.bias_c, self.bias_o])
        return Vars
class GRULRCell(nn.Module):
    '''
    GRU LR Cell with Both Full Rank and Low Rank Formulations
    Has multiple activation functions for the gates
    hidden_size = # hidden units
    gate_non_linearity = nonlinearity for the gate can be chosen from
    [tanh, sigmoid, relu, quantTanh, quantSigm]
    update_non_linearity = nonlinearity for final rnn update
    can be chosen from [tanh, sigmoid, relu, quantTanh, quantSigm]
    wRank = rank of W matrix
    (creates 4 matrices if not None else creates 3 matrices)
    uRank = rank of U matrix
    (creates 4 matrices if not None else creates 3 matrices)
    GRU architecture and compression techniques are found in
    GRU(LINK) paper
    Basic architecture is like:
    r_t = gate_nl(W1x_t + U1h_{t-1} + B_r)
    z_t = gate_nl(W2x_t + U2h_{t-1} + B_g)
    h_t^ = update_nl(W3x_t + r_t*U3(h_{t-1}) + B_h)
    h_t = z_t*h_{t-1} + (1-z_t)*h_t^
    Wi and Ui can further parameterised into low rank version by
    Wi = matmul(W, W_i) and Ui = matmul(U, U_i)
    '''
    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 name="GRULR"):
        super(GRULRCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        # [#W matrices, #U matrices]; 3 per side, +1 shared low-rank factor.
        self._num_weight_matrices = [3, 3]
        self._wRank = wRank
        self._uRank = uRank
        if wRank is not None:
            self._num_weight_matrices[0] += 1
        if uRank is not None:
            self._num_weight_matrices[1] += 1
        self._name = name
        if wRank is None:
            # Three independent full-rank input projections (r, z, candidate).
            self.W1 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
            self.W2 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
            self.W3 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
        else:
            # Shared low-rank factor W plus per-gate second factors Wi.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W1 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
            self.W3 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            # Three independent full-rank recurrent projections.
            self.U1 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
            self.U2 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
            self.U3 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            # Shared low-rank factor U plus per-gate second factors Ui.
            self.U = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U1 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
            self.U3 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_r = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_gate = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        # NOTE(review): snapshot of the device at construction time; this
        # becomes stale if the module is later moved with .to(device).
        # Unused in the visible code — confirm before relying on it.
        self._device = self.bias_update.device
    @property
    def state_size(self):
        return self._hidden_size
    @property
    def input_size(self):
        return self._input_size
    @property
    def output_size(self):
        return self._hidden_size
    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity
    @property
    def update_non_linearity(self):
        return self._update_non_linearity
    @property
    def wRank(self):
        return self._wRank
    @property
    def uRank(self):
        return self._uRank
    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices
    @property
    def name(self):
        return self._name
    @property
    def cellType(self):
        return "GRULR"
    def forward(self, input, state):
        if self._wRank is None:
            wComp1 = torch.matmul(input, self.W1)
            wComp2 = torch.matmul(input, self.W2)
            wComp3 = torch.matmul(input, self.W3)
        else:
            wComp1 = torch.matmul(
                torch.matmul(input, self.W), self.W1)
            wComp2 = torch.matmul(
                torch.matmul(input, self.W), self.W2)
            wComp3 = torch.matmul(
                torch.matmul(input, self.W), self.W3)
        if self._uRank is None:
            uComp1 = torch.matmul(state, self.U1)
            uComp2 = torch.matmul(state, self.U2)
        else:
            uComp1 = torch.matmul(
                torch.matmul(state, self.U), self.U1)
            uComp2 = torch.matmul(
                torch.matmul(state, self.U), self.U2)
        pre_comp1 = wComp1 + uComp1
        pre_comp2 = wComp2 + uComp2
        # Reset gate r_t and update gate z_t.
        r = gen_non_linearity(pre_comp1 + self.bias_r,
                              self._gate_non_linearity)
        z = gen_non_linearity(pre_comp2 + self.bias_gate,
                              self._gate_non_linearity)
        # Candidate pre-activation uses the reset-gated state r*h_{t-1};
        # it must be computed after r, hence the separate U3 product here.
        if self._uRank is None:
            pre_comp3 = wComp3 + torch.matmul(r * state, self.U3)
        else:
            pre_comp3 = wComp3 + \
                torch.matmul(torch.matmul(r * state, self.U), self.U3)
        c = gen_non_linearity(pre_comp3 + self.bias_update,
                              self._update_non_linearity)
        # h_t = z*h_{t-1} + (1-z)*h_t^
        new_h = z * state + (1.0 - z) * c
        return new_h
    def getVars(self):
        # Parameters in a fixed order: W matrices (shared factor first if
        # low-rank), U matrices, then the three biases.
        Vars = []
        if self._num_weight_matrices[0] == 3:
            Vars.extend([self.W1, self.W2, self.W3])
        else:
            Vars.extend([self.W, self.W1, self.W2, self.W3])
        if self._num_weight_matrices[1] == 3:
            Vars.extend([self.U1, self.U2, self.U3])
        else:
            Vars.extend([self.U, self.U1, self.U2, self.U3])
        Vars.extend([self.bias_r, self.bias_gate, self.bias_update])
        return Vars
class UGRNNLRCell(nn.Module):
    '''
    UGRNN LR Cell with Both Full Rank and Low Rank Formulations
    Has multiple activation functions for the gates
    hidden_size = # hidden units
    gate_non_linearity = nonlinearity for the gate can be chosen from
    [tanh, sigmoid, relu, quantTanh, quantSigm]
    update_non_linearity = nonlinearity for final rnn update
    can be chosen from [tanh, sigmoid, relu, quantTanh, quantSigm]
    wRank = rank of W matrix
    (creates 3 matrices if not None else creates 2 matrices)
    uRank = rank of U matrix
    (creates 3 matrices if not None else creates 2 matrices)
    UGRNN architecture and compression techniques are found in
    UGRNN(LINK) paper
    Basic architecture is like:
    z_t = gate_nl(W1x_t + U1h_{t-1} + B_g)
    h_t^ = update_nl(W2x_t + U2h_{t-1} + B_h)
    h_t = z_t*h_{t-1} + (1-z_t)*h_t^
    Wi and Ui can further parameterised into low rank version by
    Wi = matmul(W, W_i) and Ui = matmul(U, U_i)
    '''
    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 name="UGRNNLR"):
        super(UGRNNLRCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        # [#W matrices, #U matrices]; 2 per side, +1 shared low-rank factor.
        self._num_weight_matrices = [2, 2]
        self._wRank = wRank
        self._uRank = uRank
        if wRank is not None:
            self._num_weight_matrices[0] += 1
        if uRank is not None:
            self._num_weight_matrices[1] += 1
        self._name = name
        if wRank is None:
            # Two independent full-rank input projections (gate, candidate).
            self.W1 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
            self.W2 = nn.Parameter(
                0.1 * torch.randn([input_size, hidden_size]))
        else:
            # Shared low-rank factor W plus per-gate second factors Wi.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W1 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            # Two independent full-rank recurrent projections.
            self.U1 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
            self.U2 = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            # Shared low-rank factor U plus per-gate second factors Ui.
            self.U = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U1 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_gate = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        # NOTE(review): snapshot of the device at construction time; this
        # becomes stale if the module is later moved with .to(device).
        # Unused in the visible code — confirm before relying on it.
        self._device = self.bias_update.device
    @property
    def state_size(self):
        return self._hidden_size
    @property
    def input_size(self):
        return self._input_size
    @property
    def output_size(self):
        return self._hidden_size
    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity
    @property
    def update_non_linearity(self):
        return self._update_non_linearity
    @property
    def wRank(self):
        return self._wRank
    @property
    def uRank(self):
        return self._uRank
    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices
    @property
    def name(self):
        return self._name
    @property
    def cellType(self):
        return "UGRNNLR"
    def forward(self, input, state):
        if self._wRank is None:
            wComp1 = torch.matmul(input, self.W1)
            wComp2 = torch.matmul(input, self.W2)
        else:
            wComp1 = torch.matmul(
                torch.matmul(input, self.W), self.W1)
            wComp2 = torch.matmul(
                torch.matmul(input, self.W), self.W2)
        if self._uRank is None:
            uComp1 = torch.matmul(state, self.U1)
            uComp2 = torch.matmul(state, self.U2)
        else:
            uComp1 = torch.matmul(
                torch.matmul(state, self.U), self.U1)
            uComp2 = torch.matmul(
                torch.matmul(state, self.U), self.U2)
        pre_comp1 = wComp1 + uComp1
        pre_comp2 = wComp2 + uComp2
        # Gate z_t from the first projection, candidate h_t^ from the second.
        z = gen_non_linearity(pre_comp1 + self.bias_gate,
                              self._gate_non_linearity)
        c = gen_non_linearity(pre_comp2 + self.bias_update,
                              self._update_non_linearity)
        # h_t = z*h_{t-1} + (1-z)*h_t^
        new_h = z * state + (1.0 - z) * c
        return new_h
    def getVars(self):
        # Parameters in a fixed order: W matrices (shared factor first if
        # low-rank), U matrices, then the two biases.
        Vars = []
        if self._num_weight_matrices[0] == 2:
            Vars.extend([self.W1, self.W2])
        else:
            Vars.extend([self.W, self.W1, self.W2])
        if self._num_weight_matrices[1] == 2:
            Vars.extend([self.U1, self.U2])
        else:
            Vars.extend([self.U, self.U1, self.U2])
        Vars.extend([self.bias_gate, self.bias_update])
        return Vars
class LSTM(nn.Module):
    """Unrolled LSTM layer: LSTMLRCell driven by the BaseRNN unroller
    (drop-in analogue of nn.LSTM with optional low-rank weights)."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None, batch_first=True):
        super(LSTM, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        # Build the cell, then hand it to the generic unroller.
        cell_kwargs = dict(
            gate_non_linearity=gate_non_linearity,
            update_non_linearity=update_non_linearity,
            wRank=wRank, uRank=uRank)
        self.cell = LSTMLRCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=self.batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the full sequence unroll to BaseRNN.
        return self.unrollRNN(input, hiddenState, cellState)
class GRU(nn.Module):
    """Unrolled GRU layer: GRULRCell driven by the BaseRNN unroller
    (drop-in analogue of nn.GRU with optional low-rank weights)."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None, batch_first=True):
        super(GRU, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        # Build the cell, then hand it to the generic unroller.
        cell_kwargs = dict(
            gate_non_linearity=gate_non_linearity,
            update_non_linearity=update_non_linearity,
            wRank=wRank, uRank=uRank)
        self.cell = GRULRCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=self.batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the full sequence unroll to BaseRNN.
        return self.unrollRNN(input, hiddenState, cellState)
class UGRNN(nn.Module):
    """Unrolled UGRNN layer: UGRNNLRCell driven by the BaseRNN unroller
    (there is no nn.UGRNN; this mirrors the nn.GRU-style interface)."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None, batch_first=True):
        super(UGRNN, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        # Build the cell, then hand it to the generic unroller.
        cell_kwargs = dict(
            gate_non_linearity=gate_non_linearity,
            update_non_linearity=update_non_linearity,
            wRank=wRank, uRank=uRank)
        self.cell = UGRNNLRCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=self.batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the full sequence unroll to BaseRNN.
        return self.unrollRNN(input, hiddenState, cellState)
class FastRNN(nn.Module):
    """Unrolled FastRNN layer: FastRNNCell driven by the BaseRNN unroller."""

    def __init__(self, input_size, hidden_size,
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 alphaInit=-3.0, betaInit=3.0, batch_first=True):
        super(FastRNN, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        # Build the cell, then hand it to the generic unroller.
        cell_kwargs = dict(
            update_non_linearity=update_non_linearity,
            wRank=wRank, uRank=uRank,
            alphaInit=alphaInit, betaInit=betaInit)
        self.cell = FastRNNCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=self.batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the full sequence unroll to BaseRNN.
        return self.unrollRNN(input, hiddenState, cellState)
class FastGRNN(nn.Module):
    """Unrolled FastGRNN layer: FastGRNNCell driven by the BaseRNN unroller."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 zetaInit=1.0, nuInit=-4.0, batch_first=True):
        super(FastGRNN, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        # Build the cell, then hand it to the generic unroller.
        cell_kwargs = dict(
            gate_non_linearity=gate_non_linearity,
            update_non_linearity=update_non_linearity,
            wRank=wRank, uRank=uRank,
            zetaInit=zetaInit, nuInit=nuInit)
        self.cell = FastGRNNCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=self.batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the full sequence unroll to BaseRNN.
        return self.unrollRNN(input, hiddenState, cellState)
class SRNN2(nn.Module):

    def __init__(self, inputDim, outputDim, hiddenDim0, hiddenDim1, cellType,
                 dropoutProbability0=None, dropoutProbability1=None,
                 **cellArgs):
        '''
        A 2 Layer Shallow RNN.

        inputDim: Input data's feature dimension.
        outputDim: Dimension of the final linear projection (logits).
        hiddenDim0: Hidden state dimension of the lower layer RNN cell.
        hiddenDim1: Hidden state dimension of the second layer RNN cell.
        cellType: The type of RNN cell to use. Options are ['LSTM',
            'FastRNNCell', 'FastGRNNCell', 'GRULRCell'].
        dropoutProbability0/1: optional dropout probability in (0, 1]
            applied to the output of layer 0 / layer 1 (training only).
        '''
        super(SRNN2, self).__init__()
        self.inputDim = inputDim
        self.hiddenDim0 = hiddenDim0
        self.hiddenDim1 = hiddenDim1
        self.outputDim = outputDim
        self.dropoutProbability0 = dropoutProbability0
        self.dropoutProbability1 = dropoutProbability1
        if dropoutProbability0 is not None:
            assert 0 < dropoutProbability0 <= 1.0
        if dropoutProbability1 is not None:
            assert 0 < dropoutProbability1 <= 1.0
        self.cellArgs = {}
        self.cellArgs.update(cellArgs)
        supportedCells = ['LSTM', 'FastRNNCell', 'FastGRNNCell', 'GRULRCell']
        assert cellType in supportedCells, 'Currently supported cells: %r' % supportedCells
        self.cellType = cellType
        if self.cellType == 'LSTM':
            self.rnnClass = nn.LSTM
        elif self.cellType == 'FastRNNCell':
            self.rnnClass = FastRNN
        elif self.cellType == 'FastGRNNCell':
            self.rnnClass = FastGRNN
        else:
            self.rnnClass = GRU
        self.rnn0 = self.rnnClass(input_size=inputDim, hidden_size=hiddenDim0, **self.cellArgs)
        self.rnn1 = self.rnnClass(input_size=hiddenDim0, hidden_size=hiddenDim1, **self.cellArgs)
        self.W = torch.randn([self.hiddenDim1, self.outputDim])
        self.W = nn.Parameter(self.W)
        self.B = torch.randn([self.outputDim])
        self.B = nn.Parameter(self.B)
        # Bug fix: dropout modules used to be created fresh inside forward(),
        # so they were never registered on this module and model.eval() did
        # not disable them — dropout leaked into evaluation. Registering them
        # here ties their behavior to self.training as intended.
        self.dropoutLayer0 = None
        if dropoutProbability0 is not None:
            self.dropoutLayer0 = nn.Dropout(p=dropoutProbability0)
        self.dropoutLayer1 = None
        if dropoutProbability1 is not None:
            self.dropoutLayer1 = nn.Dropout(p=dropoutProbability1)

    def getBrickedData(self, x, brickSize):
        '''
        Takes x of shape [timeSteps, batchSize, featureDim] and returns
        bricked x of shape [numBricks, brickSize, batchSize, featureDim]
        by chunking along the 0-th axis. Trailing timesteps that do not
        fill a whole brick are dropped (same truncation as before; the old
        split-then-cat round trip was an identity and has been removed).
        '''
        timeSteps, batchSize, featureDim = x.shape
        numBricks = timeSteps // brickSize
        # Truncate the ragged tail so the reshape below is exact.
        x = x[:numBricks * brickSize]
        return torch.reshape(x, (numBricks, brickSize, batchSize, featureDim))

    def forward(self, x, brickSize):
        '''
        x: Input data as a 3D tensor with shape [timeStep, batchSize,
        featureDim]. Note that this is different from the convention
        followed in the TF codebase.

        brickSize: The brick size for the lower dimension. The input data
        will be divided into bricks along the timeStep axis (axis=0)
        internally and fed into the lowest layer RNN. Note that if the last
        brick has fewer than 'brickSize' steps, it will be ignored (no
        internal padding is done).
        '''
        assert x.ndimension() == 3
        assert list(x.size())[2] == self.inputDim
        x_bricks = self.getBrickedData(x, brickSize)
        # x_bricks: [numBricks, brickSize, batchSize, featureDim]
        x_bricks = x_bricks.permute(1, 0, 2, 3)
        # x_bricks: [brickSize, numBricks, batchSize, featureDim]
        oldShape = list(x_bricks.size())
        x_bricks = torch.reshape(
            x_bricks, [oldShape[0], oldShape[1] * oldShape[2], oldShape[3]])
        # x_bricks: [brickSize, numBricks * batchSize, featureDim]
        if self.cellType == 'LSTM':
            # nn.LSTM returns (output, (h_n, c_n)); only output is needed.
            hidd0, _ = self.rnn0(x_bricks)
        else:
            hidd0 = self.rnn0(x_bricks)
        if self.dropoutLayer0 is not None:
            hidd0 = self.dropoutLayer0(hidd0)
        # Keep only the final timestep of each brick.
        hidd0 = torch.squeeze(hidd0[-1])
        # [numBricks * batchSize, hiddenDim0]
        inp1 = hidd0.view(oldShape[1], oldShape[2], self.hiddenDim0)
        # [numBricks, batchSize, hiddenDim0]
        if self.cellType == 'LSTM':
            hidd1, _ = self.rnn1(inp1)
        else:
            hidd1 = self.rnn1(inp1)
        if self.dropoutLayer1 is not None:
            hidd1 = self.dropoutLayer1(hidd1)
        hidd1 = torch.squeeze(hidd1[-1])
        # Final linear projection: [batchSize, outputDim].
        out = torch.matmul(hidd1, self.W) + self.B
        return out
import torch
import torch.nn as nn
import numpy as np
def gen_non_linearity(A, non_linearity):
    '''
    Return the requested activation applied to tensor A.

    non_linearity is either a callable or one of
    ['tanh', 'sigmoid', 'relu', 'quantTanh', 'quantSigm', 'quantSigm4'].
    Raises ValueError when it is neither.
    '''
    if non_linearity == "tanh":
        return torch.tanh(A)
    elif non_linearity == "sigmoid":
        return torch.sigmoid(A)
    elif non_linearity == "relu":
        # Bug fix: torch.relu takes a single tensor argument; the old call
        # passed a spurious second argument (0.0) and raised a TypeError.
        return torch.relu(A)
    elif non_linearity == "quantTanh":
        # Hard tanh: clip A to [-1, 1].
        return torch.max(torch.min(A, torch.ones_like(A)), -1.0 * torch.ones_like(A))
    elif non_linearity == "quantSigm":
        # Hard sigmoid: (A + 1) / 2 clipped to [0, 1].
        A = (A + 1.0) / 2.0
        return torch.max(torch.min(A, torch.ones_like(A)), torch.zeros_like(A))
    elif non_linearity == "quantSigm4":
        # Wider hard sigmoid: (A + 2) / 4 clipped to [0, 1].
        A = (A + 2.0) / 4.0
        return torch.max(torch.min(A, torch.ones_like(A)), torch.zeros_like(A))
    else:
        # non_linearity is expected to be a user-specified callable.
        if not callable(non_linearity):
            # Bug fix: the old message was built with a stray unary '+' on a
            # string literal, raising a TypeError before the ValueError.
            raise ValueError(
                "non_linearity is either a callable or a value in "
                "['tanh', 'sigmoid', 'relu', 'quantTanh', "
                "'quantSigm', 'quantSigm4']")
        return non_linearity(A)
class BaseRNN(nn.Module):
    '''
    Generic sequence unroller, the equivalent of tf's static_rnn, used to
    run any cell defined in this file over a full input sequence.

    Data is assumed batch-first by default, i.e.
    [batchSize, timeSteps, inputDims]; otherwise
    [timeSteps, batchSize, inputDims].

    For "LSTMLR" cells the forward pass returns (hiddenStates, cellStates);
    for every other cell it returns hiddenStates only.
    '''

    def __init__(self, RNNCell, batch_first=True):
        super(BaseRNN, self).__init__()
        self.RNNCell = RNNCell
        self.batch_first = batch_first

    def forward(self, input, hiddenState=None, cellState=None):
        self.device = input.device
        outDim = self.RNNCell.output_size
        # Which axis carries time, and how large the per-step batch is,
        # depends only on the layout flag.
        if self.batch_first:
            steps, batch = input.shape[1], input.shape[0]
        else:
            steps, batch = input.shape[0], input.shape[1]
        hiddenStates = torch.zeros(
            [input.shape[0], input.shape[1], outDim]).to(self.device)
        if hiddenState is None:
            hiddenState = torch.zeros([batch, outDim]).to(self.device)
        isLSTM = self.RNNCell.cellType == "LSTMLR"
        if isLSTM:
            cellStates = torch.zeros(
                [input.shape[0], input.shape[1], outDim]).to(self.device)
            if cellState is None:
                cellState = torch.zeros([batch, outDim]).to(self.device)
        for step in range(steps):
            frame = input[:, step, :] if self.batch_first else input[step, :, :]
            if isLSTM:
                hiddenState, cellState = self.RNNCell(
                    frame, (hiddenState, cellState))
            else:
                hiddenState = self.RNNCell(frame, hiddenState)
            if self.batch_first:
                hiddenStates[:, step, :] = hiddenState
                if isLSTM:
                    cellStates[:, step, :] = cellState
            else:
                hiddenStates[step, :, :] = hiddenState
                if isLSTM:
                    cellStates[step, :, :] = cellState
        if isLSTM:
            return hiddenStates, cellStates
        return hiddenStates
class FastGRNNCell(nn.Module):
    '''
    FastGRNN cell supporting both the full-rank and the low-rank
    formulation, with configurable gate activations.

    hidden_size = number of hidden units
    gate_non_linearity = gate activation, chosen from
        [tanh, sigmoid, relu, quantTanh, quantSigm] (or a callable)
    update_non_linearity = candidate-state activation, same choices
    wRank / uRank = optional ranks; when set, W is factored as
        W = matmul(W_1, W_2) and U as U = matmul(U_1, U_2)
    zetaInit / nuInit = initial values for zeta (scale) and nu
        (translation)

    Update equations (see the FastGRNN paper):
        z_t  = gate_nl(W x_t + U h_{t-1} + B_g)
        h_t^ = update_nl(W x_t + U h_{t-1} + B_h)
        h_t  = z_t * h_{t-1} + (sigmoid(zeta) * (1 - z_t) + sigmoid(nu)) * h_t^
    '''

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 zetaInit=1.0, nuInit=-4.0, name="FastGRNN"):
        super(FastGRNNCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self._zetaInit = zetaInit
        self._nuInit = nuInit
        self._name = name
        # One matrix per projection, plus one extra factor when low rank.
        self._num_weight_matrices = [1 + int(wRank is not None),
                                     1 + int(uRank is not None)]
        if wRank is None:
            self.W = nn.Parameter(0.1 * torch.randn([input_size, hidden_size]))
        else:
            self.W1 = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            self.U = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            self.U1 = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_gate = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        self.zeta = nn.Parameter(self._zetaInit * torch.ones([1, 1]))
        self.nu = nn.Parameter(self._nuInit * torch.ones([1, 1]))

    @property
    def state_size(self):
        return self._hidden_size

    @property
    def input_size(self):
        return self._input_size

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity

    @property
    def update_non_linearity(self):
        return self._update_non_linearity

    @property
    def wRank(self):
        return self._wRank

    @property
    def uRank(self):
        return self._uRank

    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices

    @property
    def name(self):
        return self._name

    @property
    def cellType(self):
        return "FastGRNN"

    def _w_comp(self, input):
        # Input projection: x W, or (x W1) W2 in the low-rank case.
        if self._wRank is None:
            return torch.matmul(input, self.W)
        return torch.matmul(torch.matmul(input, self.W1), self.W2)

    def _u_comp(self, state):
        # Recurrent projection: h U, or (h U1) U2 in the low-rank case.
        if self._uRank is None:
            return torch.matmul(state, self.U)
        return torch.matmul(torch.matmul(state, self.U1), self.U2)

    def forward(self, input, state):
        pre_comp = self._w_comp(input) + self._u_comp(state)
        z = gen_non_linearity(pre_comp + self.bias_gate,
                              self._gate_non_linearity)
        c = gen_non_linearity(pre_comp + self.bias_update,
                              self._update_non_linearity)
        return z * state + (torch.sigmoid(self.zeta) *
                            (1.0 - z) + torch.sigmoid(self.nu)) * c

    def getVars(self):
        Vars = [self.W] if self._wRank is None else [self.W1, self.W2]
        Vars += [self.U] if self._uRank is None else [self.U1, self.U2]
        Vars += [self.bias_gate, self.bias_update, self.zeta, self.nu]
        return Vars
class FastRNNCell(nn.Module):
    '''
    FastRNN cell supporting both the full-rank and the low-rank
    formulation.

    hidden_size = number of hidden units
    update_non_linearity = candidate-state activation, chosen from
        [tanh, sigmoid, relu, quantTanh, quantSigm] (or a callable)
    wRank / uRank = optional ranks; when set, W is factored as
        W = matmul(W_1, W_2) and U as U = matmul(U_1, U_2)
    alphaInit / betaInit = initial values of the update scalar alpha and
        the previous-state weight beta

    Update equations (see the FastGRNN paper):
        h_t^ = update_nl(W x_t + U h_{t-1} + B_h)
        h_t  = sigmoid(beta) * h_{t-1} + sigmoid(alpha) * h_t^
    '''

    def __init__(self, input_size, hidden_size,
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 alphaInit=-3.0, betaInit=3.0, name="FastRNN"):
        super(FastRNNCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self._alphaInit = alphaInit
        self._betaInit = betaInit
        self._name = name
        # One matrix per projection, plus one extra factor when low rank.
        self._num_weight_matrices = [1 + int(wRank is not None),
                                     1 + int(uRank is not None)]
        if wRank is None:
            self.W = nn.Parameter(0.1 * torch.randn([input_size, hidden_size]))
        else:
            self.W1 = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
            self.W2 = nn.Parameter(0.1 * torch.randn([wRank, hidden_size]))
        if uRank is None:
            self.U = nn.Parameter(
                0.1 * torch.randn([hidden_size, hidden_size]))
        else:
            self.U1 = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
            self.U2 = nn.Parameter(0.1 * torch.randn([uRank, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        self.alpha = nn.Parameter(self._alphaInit * torch.ones([1, 1]))
        self.beta = nn.Parameter(self._betaInit * torch.ones([1, 1]))

    @property
    def state_size(self):
        return self._hidden_size

    @property
    def input_size(self):
        return self._input_size

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def update_non_linearity(self):
        return self._update_non_linearity

    @property
    def wRank(self):
        return self._wRank

    @property
    def uRank(self):
        return self._uRank

    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices

    @property
    def name(self):
        return self._name

    @property
    def cellType(self):
        return "FastRNN"

    def _w_comp(self, input):
        # Input projection: x W, or (x W1) W2 in the low-rank case.
        if self._wRank is None:
            return torch.matmul(input, self.W)
        return torch.matmul(torch.matmul(input, self.W1), self.W2)

    def _u_comp(self, state):
        # Recurrent projection: h U, or (h U1) U2 in the low-rank case.
        if self._uRank is None:
            return torch.matmul(state, self.U)
        return torch.matmul(torch.matmul(state, self.U1), self.U2)

    def forward(self, input, state):
        pre_comp = self._w_comp(input) + self._u_comp(state)
        c = gen_non_linearity(pre_comp + self.bias_update,
                              self._update_non_linearity)
        return torch.sigmoid(self.beta) * state + \
            torch.sigmoid(self.alpha) * c

    def getVars(self):
        Vars = [self.W] if self._wRank is None else [self.W1, self.W2]
        Vars += [self.U] if self._uRank is None else [self.U1, self.U2]
        Vars += [self.bias_update, self.alpha, self.beta]
        return Vars
class LSTMLRCell(nn.Module):
    '''
    LSTM cell with optional low-rank (LR) parameterisation of its weight
    matrices.

    hidden_size = number of hidden units
    gate_non_linearity = gate activation, chosen from
        [tanh, sigmoid, relu, quantTanh, quantSigm] (or a callable)
    update_non_linearity = candidate/cell activation, same choices
    wRank = shared rank of all input-side matrices
        (5 matrices when set, 4 otherwise)
    uRank = shared rank of all recurrent matrices
        (5 matrices when set, 4 otherwise)

    Update equations (standard LSTM):
        f_t  = gate_nl(W1 x_t + U1 h_{t-1} + B_f)
        i_t  = gate_nl(W2 x_t + U2 h_{t-1} + B_i)
        C_t^ = update_nl(W3 x_t + U3 h_{t-1} + B_c)
        o_t  = gate_nl(W4 x_t + U4 h_{t-1} + B_o)
        C_t  = f_t * C_{t-1} + i_t * C_t^
        h_t  = o_t * update_nl(C_t)
    With wRank/uRank set, Wi = matmul(W, W_i) and Ui = matmul(U, U_i),
    i.e. a single shared projection on each side.
    '''

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 name="LSTMLR"):
        super(LSTMLRCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self._name = name
        # Four per-gate matrices, plus one shared factor when low rank.
        self._num_weight_matrices = [4 + int(wRank is not None),
                                     4 + int(uRank is not None)]
        if wRank is not None:
            # Shared low-rank projection for the input side.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
        wRows = input_size if wRank is None else wRank
        self.W1 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        self.W2 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        self.W3 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        self.W4 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        if uRank is not None:
            # Shared low-rank projection for the recurrent side.
            self.U = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
        uRows = hidden_size if uRank is None else uRank
        self.U1 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.U2 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.U3 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.U4 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.bias_f = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_i = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_c = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_o = nn.Parameter(torch.ones([1, hidden_size]))

    @property
    def state_size(self):
        return 2 * self._hidden_size

    @property
    def input_size(self):
        return self._input_size

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity

    @property
    def update_non_linearity(self):
        return self._update_non_linearity

    @property
    def wRank(self):
        return self._wRank

    @property
    def uRank(self):
        return self._uRank

    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices

    @property
    def name(self):
        return self._name

    @property
    def cellType(self):
        return "LSTMLR"

    def _w_comps(self, input):
        # Per-gate input projections, sharing the low-rank factor if any.
        base = input if self._wRank is None else torch.matmul(input, self.W)
        return [torch.matmul(base, w)
                for w in (self.W1, self.W2, self.W3, self.W4)]

    def _u_comps(self, h):
        # Per-gate recurrent projections, sharing the low-rank factor if any.
        base = h if self._uRank is None else torch.matmul(h, self.U)
        return [torch.matmul(base, u)
                for u in (self.U1, self.U2, self.U3, self.U4)]

    def forward(self, input, hiddenStates):
        (h, c) = hiddenStates
        pre1, pre2, pre3, pre4 = [w + u for w, u in
                                  zip(self._w_comps(input), self._u_comps(h))]
        i = gen_non_linearity(pre1 + self.bias_i,
                              self._gate_non_linearity)
        f = gen_non_linearity(pre2 + self.bias_f,
                              self._gate_non_linearity)
        o = gen_non_linearity(pre4 + self.bias_o,
                              self._gate_non_linearity)
        c_ = gen_non_linearity(pre3 + self.bias_c,
                               self._update_non_linearity)
        new_c = f * c + i * c_
        new_h = o * gen_non_linearity(new_c, self._update_non_linearity)
        return new_h, new_c

    def getVars(self):
        Vars = [] if self._wRank is None else [self.W]
        Vars += [self.W1, self.W2, self.W3, self.W4]
        Vars += [] if self._uRank is None else [self.U]
        Vars += [self.U1, self.U2, self.U3, self.U4]
        Vars += [self.bias_f, self.bias_i, self.bias_c, self.bias_o]
        return Vars
class GRULRCell(nn.Module):
    '''
    GRU cell with optional low-rank (LR) parameterisation of its weight
    matrices.

    hidden_size = number of hidden units
    gate_non_linearity = gate activation, chosen from
        [tanh, sigmoid, relu, quantTanh, quantSigm] (or a callable)
    update_non_linearity = candidate-state activation, same choices
    wRank = shared rank of the input-side matrices
        (4 matrices when set, 3 otherwise)
    uRank = shared rank of the recurrent matrices
        (4 matrices when set, 3 otherwise)

    Update equations (standard GRU):
        r_t  = gate_nl(W1 x_t + U1 h_{t-1} + B_r)
        z_t  = gate_nl(W2 x_t + U2 h_{t-1} + B_g)
        h_t^ = update_nl(W3 x_t + r_t * U3 h_{t-1} + B_h)
        h_t  = z_t * h_{t-1} + (1 - z_t) * h_t^
    With wRank/uRank set, Wi = matmul(W, W_i) and Ui = matmul(U, U_i).
    '''

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 name="GRULR"):
        super(GRULRCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self._name = name
        # Three per-gate matrices, plus one shared factor when low rank.
        self._num_weight_matrices = [3 + int(wRank is not None),
                                     3 + int(uRank is not None)]
        if wRank is not None:
            # Shared low-rank projection for the input side.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
        wRows = input_size if wRank is None else wRank
        self.W1 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        self.W2 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        self.W3 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        if uRank is not None:
            # Shared low-rank projection for the recurrent side.
            self.U = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
        uRows = hidden_size if uRank is None else uRank
        self.U1 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.U2 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.U3 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.bias_r = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_gate = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        self._device = self.bias_update.device

    @property
    def state_size(self):
        return self._hidden_size

    @property
    def input_size(self):
        return self._input_size

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity

    @property
    def update_non_linearity(self):
        return self._update_non_linearity

    @property
    def wRank(self):
        return self._wRank

    @property
    def uRank(self):
        return self._uRank

    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices

    @property
    def name(self):
        return self._name

    @property
    def cellType(self):
        return "GRULR"

    def forward(self, input, state):
        # Input-side projections, sharing the low-rank factor if any.
        if self._wRank is None:
            wComp1 = torch.matmul(input, self.W1)
            wComp2 = torch.matmul(input, self.W2)
            wComp3 = torch.matmul(input, self.W3)
        else:
            projected_in = torch.matmul(input, self.W)
            wComp1 = torch.matmul(projected_in, self.W1)
            wComp2 = torch.matmul(projected_in, self.W2)
            wComp3 = torch.matmul(projected_in, self.W3)
        # Recurrent projections for the reset and update gates.
        if self._uRank is None:
            uComp1 = torch.matmul(state, self.U1)
            uComp2 = torch.matmul(state, self.U2)
        else:
            projected_state = torch.matmul(state, self.U)
            uComp1 = torch.matmul(projected_state, self.U1)
            uComp2 = torch.matmul(projected_state, self.U2)
        r = gen_non_linearity(wComp1 + uComp1 + self.bias_r,
                              self._gate_non_linearity)
        z = gen_non_linearity(wComp2 + uComp2 + self.bias_gate,
                              self._gate_non_linearity)
        # The candidate projection applies the reset gate to the state first.
        if self._uRank is None:
            pre_comp3 = wComp3 + torch.matmul(r * state, self.U3)
        else:
            pre_comp3 = wComp3 + \
                torch.matmul(torch.matmul(r * state, self.U), self.U3)
        c = gen_non_linearity(pre_comp3 + self.bias_update,
                              self._update_non_linearity)
        return z * state + (1.0 - z) * c

    def getVars(self):
        Vars = [] if self._wRank is None else [self.W]
        Vars += [self.W1, self.W2, self.W3]
        Vars += [] if self._uRank is None else [self.U]
        Vars += [self.U1, self.U2, self.U3]
        Vars += [self.bias_r, self.bias_gate, self.bias_update]
        return Vars
class UGRNNLRCell(nn.Module):
    '''
    UGRNN cell with optional low-rank (LR) parameterisation of its weight
    matrices.

    hidden_size = number of hidden units
    gate_non_linearity = gate activation, chosen from
        [tanh, sigmoid, relu, quantTanh, quantSigm] (or a callable)
    update_non_linearity = candidate-state activation, same choices
    wRank = shared rank of the input-side matrices
        (3 matrices when set, 2 otherwise)
    uRank = shared rank of the recurrent matrices
        (3 matrices when set, 2 otherwise)

    Update equations (UGRNN):
        z_t  = gate_nl(W1 x_t + U1 h_{t-1} + B_g)
        h_t^ = update_nl(W2 x_t + U2 h_{t-1} + B_h)
        h_t  = z_t * h_{t-1} + (1 - z_t) * h_t^
    With wRank/uRank set, Wi = matmul(W, W_i) and Ui = matmul(U, U_i).
    '''

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 name="UGRNNLR"):
        super(UGRNNLRCell, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self._name = name
        # Two per-gate matrices, plus one shared factor when low rank.
        self._num_weight_matrices = [2 + int(wRank is not None),
                                     2 + int(uRank is not None)]
        if wRank is not None:
            # Shared low-rank projection for the input side.
            self.W = nn.Parameter(0.1 * torch.randn([input_size, wRank]))
        wRows = input_size if wRank is None else wRank
        self.W1 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        self.W2 = nn.Parameter(0.1 * torch.randn([wRows, hidden_size]))
        if uRank is not None:
            # Shared low-rank projection for the recurrent side.
            self.U = nn.Parameter(0.1 * torch.randn([hidden_size, uRank]))
        uRows = hidden_size if uRank is None else uRank
        self.U1 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.U2 = nn.Parameter(0.1 * torch.randn([uRows, hidden_size]))
        self.bias_gate = nn.Parameter(torch.ones([1, hidden_size]))
        self.bias_update = nn.Parameter(torch.ones([1, hidden_size]))
        self._device = self.bias_update.device

    @property
    def state_size(self):
        return self._hidden_size

    @property
    def input_size(self):
        return self._input_size

    @property
    def output_size(self):
        return self._hidden_size

    @property
    def gate_non_linearity(self):
        return self._gate_non_linearity

    @property
    def update_non_linearity(self):
        return self._update_non_linearity

    @property
    def wRank(self):
        return self._wRank

    @property
    def uRank(self):
        return self._uRank

    @property
    def num_weight_matrices(self):
        return self._num_weight_matrices

    @property
    def name(self):
        return self._name

    @property
    def cellType(self):
        return "UGRNNLR"

    def _w_comps(self, input):
        # Per-gate input projections, sharing the low-rank factor if any.
        base = input if self._wRank is None else torch.matmul(input, self.W)
        return [torch.matmul(base, w) for w in (self.W1, self.W2)]

    def _u_comps(self, state):
        # Per-gate recurrent projections, sharing the low-rank factor if any.
        base = state if self._uRank is None else torch.matmul(state, self.U)
        return [torch.matmul(base, u) for u in (self.U1, self.U2)]

    def forward(self, input, state):
        pre_comp1, pre_comp2 = [w + u for w, u in
                                zip(self._w_comps(input), self._u_comps(state))]
        z = gen_non_linearity(pre_comp1 + self.bias_gate,
                              self._gate_non_linearity)
        c = gen_non_linearity(pre_comp2 + self.bias_update,
                              self._update_non_linearity)
        return z * state + (1.0 - z) * c

    def getVars(self):
        Vars = [] if self._wRank is None else [self.W]
        Vars += [self.W1, self.W2]
        Vars += [] if self._uRank is None else [self.U]
        Vars += [self.U1, self.U2]
        Vars += [self.bias_gate, self.bias_update]
        return Vars
class LSTM(nn.Module):
    """nn.LSTM-style layer: an LSTMLRCell unrolled over time by BaseRNN."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None, batch_first=True):
        super(LSTM, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        cell_kwargs = {
            "gate_non_linearity": gate_non_linearity,
            "update_non_linearity": update_non_linearity,
            "wRank": wRank,
            "uRank": uRank,
        }
        self.cell = LSTMLRCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the per-time-step loop to the generic unroller.
        return self.unrollRNN(input, hiddenState, cellState)
class GRU(nn.Module):
    """nn.GRU-style layer: a GRULRCell unrolled over time by BaseRNN."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None, batch_first=True):
        super(GRU, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        cell_kwargs = {
            "gate_non_linearity": gate_non_linearity,
            "update_non_linearity": update_non_linearity,
            "wRank": wRank,
            "uRank": uRank,
        }
        self.cell = GRULRCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the per-time-step loop to the generic unroller.
        return self.unrollRNN(input, hiddenState, cellState)
class UGRNN(nn.Module):
    """UGRNN layer: a UGRNNLRCell unrolled over time by BaseRNN."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None, batch_first=True):
        super(UGRNN, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        cell_kwargs = {
            "gate_non_linearity": gate_non_linearity,
            "update_non_linearity": update_non_linearity,
            "wRank": wRank,
            "uRank": uRank,
        }
        self.cell = UGRNNLRCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the per-time-step loop to the generic unroller.
        return self.unrollRNN(input, hiddenState, cellState)
class FastRNN(nn.Module):
    """FastRNN layer: a FastRNNCell unrolled over time by BaseRNN."""

    def __init__(self, input_size, hidden_size,
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 alphaInit=-3.0, betaInit=3.0, batch_first=True):
        super(FastRNN, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        cell_kwargs = {
            "update_non_linearity": update_non_linearity,
            "wRank": wRank,
            "uRank": uRank,
            "alphaInit": alphaInit,
            "betaInit": betaInit,
        }
        self.cell = FastRNNCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the per-time-step loop to the generic unroller.
        return self.unrollRNN(input, hiddenState, cellState)
class FastGRNN(nn.Module):
    """FastGRNN layer: a FastGRNNCell unrolled over time by BaseRNN."""

    def __init__(self, input_size, hidden_size, gate_non_linearity="sigmoid",
                 update_non_linearity="tanh", wRank=None, uRank=None,
                 zetaInit=1.0, nuInit=-4.0, batch_first=True):
        super(FastGRNN, self).__init__()
        self._input_size = input_size
        self._hidden_size = hidden_size
        self._gate_non_linearity = gate_non_linearity
        self._update_non_linearity = update_non_linearity
        self._wRank = wRank
        self._uRank = uRank
        self.batch_first = batch_first
        cell_kwargs = {
            "gate_non_linearity": gate_non_linearity,
            "update_non_linearity": update_non_linearity,
            "wRank": wRank,
            "uRank": uRank,
            "zetaInit": zetaInit,
            "nuInit": nuInit,
        }
        self.cell = FastGRNNCell(input_size, hidden_size, **cell_kwargs)
        self.unrollRNN = BaseRNN(self.cell, batch_first=batch_first)

    def forward(self, input, hiddenState=None, cellState=None):
        # Delegate the per-time-step loop to the generic unroller.
        return self.unrollRNN(input, hiddenState, cellState)
class SRNN2(nn.Module):
    '''
    A 2-layer Shallow RNN (SRNN).

    The input sequence is chopped into bricks of `brickSize` consecutive
    time steps; the lower RNN runs over each brick independently, and the
    upper RNN runs over the sequence of per-brick summaries.

    inputDim: input feature dimension.
    outputDim: dimension of the final linear projection.
    hiddenDim0: hidden state dimension of the lower-layer RNN cell.
    hiddenDim1: hidden state dimension of the upper-layer RNN cell.
    cellType: one of ['LSTM', 'FastRNNCell', 'FastGRNNCell', 'GRULRCell'].
    dropoutProbability0 / dropoutProbability1: optional dropout applied to
        the output of the lower / upper layer.
    cellArgs: forwarded verbatim to both RNN constructors.
    '''

    def __init__(self, inputDim, outputDim, hiddenDim0, hiddenDim1, cellType,
                 dropoutProbability0=None, dropoutProbability1=None,
                 **cellArgs):
        super(SRNN2, self).__init__()
        self.inputDim = inputDim
        self.hiddenDim0 = hiddenDim0
        self.hiddenDim1 = hiddenDim1
        self.outputDim = outputDim
        self.dropoutProbability0 = dropoutProbability0
        self.dropoutProbability1 = dropoutProbability1
        if dropoutProbability0 is not None:
            assert 0 < dropoutProbability0 <= 1.0
        if dropoutProbability1 is not None:
            assert 0 < dropoutProbability1 <= 1.0
        # Build the dropout modules once, here, so that model.eval()
        # disables them. The previous code constructed fresh nn.Dropout
        # modules inside forward(), which left them permanently in
        # training mode and applied dropout even during evaluation.
        self.dropoutLayer0 = (nn.Dropout(p=dropoutProbability0)
                              if dropoutProbability0 is not None else None)
        self.dropoutLayer1 = (nn.Dropout(p=dropoutProbability1)
                              if dropoutProbability1 is not None else None)
        self.cellArgs = {}
        self.cellArgs.update(cellArgs)
        supportedCells = ['LSTM', 'FastRNNCell', 'FastGRNNCell', 'GRULRCell']
        assert cellType in supportedCells, \
            'Currently supported cells: %r' % supportedCells
        self.cellType = cellType
        if self.cellType == 'LSTM':
            self.rnnClass = nn.LSTM
        elif self.cellType == 'FastRNNCell':
            self.rnnClass = FastRNN
        elif self.cellType == 'FastGRNNCell':
            self.rnnClass = FastGRNN
        else:
            self.rnnClass = GRU
        self.rnn0 = self.rnnClass(input_size=inputDim, hidden_size=hiddenDim0,
                                  **self.cellArgs)
        self.rnn1 = self.rnnClass(input_size=hiddenDim0, hidden_size=hiddenDim1,
                                  **self.cellArgs)
        # Final linear readout applied to the upper layer's last state.
        self.W = nn.Parameter(torch.randn([self.hiddenDim1, self.outputDim]))
        self.B = nn.Parameter(torch.randn([self.outputDim]))

    def getBrickedData(self, x, brickSize):
        '''
        Takes x of shape [timeSteps, batchSize, featureDim] and returns
        bricked x of shape [numBricks, brickSize, batchSize, featureDim]
        by chunking the time axis into consecutive bricks. Trailing steps
        that do not fill a whole brick are dropped.
        '''
        timeSteps, batchSize, featureDim = x.shape[0], x.shape[1], x.shape[2]
        numBricks = timeSteps // brickSize
        # The original split/cat round-trip was a no-op; reshaping the
        # truncated tensor produces the same bricks directly.
        x = x[:numBricks * brickSize]
        return torch.reshape(x, (numBricks, brickSize, batchSize, featureDim))

    def forward(self, x, brickSize):
        '''
        x: 3D tensor with shape [timeSteps, batchSize, featureDim]. Note
        that this is different from the convention followed in the TF
        codebase.
        brickSize: brick length for the lower layer. The input is divided
        into bricks along the timeStep axis (axis=0); if the last brick
        has fewer than brickSize steps it is ignored (no padding is done).
        '''
        assert x.ndimension() == 3
        assert list(x.size())[2] == self.inputDim
        x_bricks = self.getBrickedData(x, brickSize)
        # [numBricks, brickSize, batchSize, featureDim]
        x_bricks = x_bricks.permute(1, 0, 2, 3)
        # [brickSize, numBricks, batchSize, featureDim]
        oldShape = list(x_bricks.size())
        x_bricks = torch.reshape(
            x_bricks, [oldShape[0], oldShape[1] * oldShape[2], oldShape[3]])
        # [brickSize, numBricks * batchSize, featureDim]
        if self.cellType == 'LSTM':
            # nn.LSTM returns (output, (h_n, c_n)); only the outputs are used.
            hidd0, _ = self.rnn0(x_bricks)
        else:
            hidd0 = self.rnn0(x_bricks)
        if self.dropoutLayer0 is not None:
            hidd0 = self.dropoutLayer0(hidd0)
        hidd0 = torch.squeeze(hidd0[-1])
        # [numBricks * batchSize, hiddenDim0]
        inp1 = hidd0.view(oldShape[1], oldShape[2], self.hiddenDim0)
        # [numBricks, batchSize, hiddenDim0]
        if self.cellType == 'LSTM':
            hidd1, _ = self.rnn1(inp1)
        else:
            hidd1 = self.rnn1(inp1)
        if self.dropoutLayer1 is not None:
            hidd1 = self.dropoutLayer1(hidd1)
        hidd1 = torch.squeeze(hidd1[-1])
        # [batchSize, hiddenDim1] -> [batchSize, outputDim]
        return torch.matmul(hidd1, self.W) + self.B
from typing import Callable, Dict, List
from unittest.mock import MagicMock
from flask_sqlalchemy import models_committed
from itsdangerous import URLSafeSerializer
from sqlalchemy.orm import scoped_session
from profiles.events import maintain_orcid_webhook
from profiles.models import OrcidToken, Profile
def test_it_has_a_valid_signal_handler_registered_on_app(registered_handler_names: List[str]):
    """The app registers the ORCID webhook maintainer as a signal handler."""
    expected_handler = 'webhook_maintainer'
    assert expected_handler in registered_handler_names
def test_it_sets_a_webhook_when_a_profile_is_inserted(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer,
commit: Callable[[], None]):
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.add(profile)
commit()
assert mock_orcid_client.set_webhook.call_count == 1
assert mock_orcid_client.set_webhook.call_args[0][0] == '0000-0002-1825-0097'
assert mock_orcid_client.set_webhook.call_args[0][1] == 'http://localhost/orcid-webhook/{}' \
.format(url_safe_serializer.dumps('0000-0002-1825-0097'))
def test_it_sets_a_webhook_when_a_profile_is_updated(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer,
commit: Callable[[], None]):
session.add(profile)
commit()
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
profile.add_email_address('<EMAIL>')
session.add(profile)
commit()
assert mock_orcid_client.set_webhook.call_count == 1
assert mock_orcid_client.set_webhook.call_args[0][0] == '0000-0002-1825-0097'
assert mock_orcid_client.set_webhook.call_args[0][1] == 'http://localhost/orcid-webhook/{}' \
.format(url_safe_serializer.dumps('0000-0002-1825-0097'))
def test_it_will_remove_the_webhook_when_a_profile_is_deleted(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer:
URLSafeSerializer,
commit: Callable[[], None]):
session.add(profile)
commit()
mock_orcid_client.remove_webhook.side_effect = Exception('Some Exception')
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.delete(profile)
commit()
assert mock_orcid_client.remove_webhook.call_count == 1
def test_it_ignores_other_models_being_committed(orcid_token: OrcidToken,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer):
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.add(orcid_token)
session.commit()
assert mock_orcid_client.set_webhook.call_count == 0
assert mock_orcid_client.remove_webhook.call_count == 0
def test_exception_is_handled_by_catch_exception_decorator(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer,
commit: Callable[[], None]):
mock_orcid_client.remove_webhook.side_effect = Exception('Some Exception')
session.add(profile)
commit()
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.delete(profile)
commit()
assert mock_orcid_client.remove_webhook.call_count == 1 | test/events/test_maintain_orcid_webhook.py | from typing import Callable, Dict, List
from unittest.mock import MagicMock
from flask_sqlalchemy import models_committed
from itsdangerous import URLSafeSerializer
from sqlalchemy.orm import scoped_session
from profiles.events import maintain_orcid_webhook
from profiles.models import OrcidToken, Profile
def test_it_has_a_valid_signal_handler_registered_on_app(registered_handler_names: List[str]):
assert 'webhook_maintainer' in registered_handler_names
def test_it_sets_a_webhook_when_a_profile_is_inserted(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer,
commit: Callable[[], None]):
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.add(profile)
commit()
assert mock_orcid_client.set_webhook.call_count == 1
assert mock_orcid_client.set_webhook.call_args[0][0] == '0000-0002-1825-0097'
assert mock_orcid_client.set_webhook.call_args[0][1] == 'http://localhost/orcid-webhook/{}' \
.format(url_safe_serializer.dumps('0000-0002-1825-0097'))
def test_it_sets_a_webhook_when_a_profile_is_updated(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer,
commit: Callable[[], None]):
session.add(profile)
commit()
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
profile.add_email_address('<EMAIL>')
session.add(profile)
commit()
assert mock_orcid_client.set_webhook.call_count == 1
assert mock_orcid_client.set_webhook.call_args[0][0] == '0000-0002-1825-0097'
assert mock_orcid_client.set_webhook.call_args[0][1] == 'http://localhost/orcid-webhook/{}' \
.format(url_safe_serializer.dumps('0000-0002-1825-0097'))
def test_it_will_remove_the_webhook_when_a_profile_is_deleted(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer:
URLSafeSerializer,
commit: Callable[[], None]):
session.add(profile)
commit()
mock_orcid_client.remove_webhook.side_effect = Exception('Some Exception')
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.delete(profile)
commit()
assert mock_orcid_client.remove_webhook.call_count == 1
def test_it_ignores_other_models_being_committed(orcid_token: OrcidToken,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer):
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.add(orcid_token)
session.commit()
assert mock_orcid_client.set_webhook.call_count == 0
assert mock_orcid_client.remove_webhook.call_count == 0
def test_exception_is_handled_by_catch_exception_decorator(profile: Profile,
orcid_config: Dict[str, str],
mock_orcid_client: MagicMock,
session: scoped_session,
url_safe_serializer: URLSafeSerializer,
commit: Callable[[], None]):
mock_orcid_client.remove_webhook.side_effect = Exception('Some Exception')
session.add(profile)
commit()
webhook_maintainer = maintain_orcid_webhook(orcid_config, mock_orcid_client,
url_safe_serializer)
models_committed.connect(receiver=webhook_maintainer)
session.delete(profile)
commit()
assert mock_orcid_client.remove_webhook.call_count == 1 | 0.762247 | 0.224162 |
from pandas import read_csv as csv, DataFrame as df, merge
from matplotlib.pyplot import figure, scatter, boxplot, savefig, hist, get_cmap, text, title, xlabel, ylabel, hlines, legend
from seaborn import kdeplot
from seaborn import heatmap as hm
from warnings import filterwarnings as fw
from numpy import mean
from scipy.stats import kurtosis, skew, jarque_bera
fw('ignore')
all_loc = {"Kuningan" : 0, "Subang" : 1, "Citayam" : 2}
genotype = csv("data/RiceToolkit/app-master/data/X.csv")
genotype.rename(columns={'sample_index':'sample_id'}, inplace=True)
genotype.location.replace(all_loc, inplace=True)
snp_data = genotype[genotype.columns[2:]]
for i in list(snp_data.columns):
snp_data.loc[:, i] = snp_data[i].apply(lambda x: round(x), snp_data[i].tolist())
snp_data
figure(figsize=(10, 10))
hm(snp_data.corr(), cmap="viridis")
# savefig("result/snp_corr_heatmap.png", bbox_inches='tight', dpi=1200)
snp_dict = dict(zip([i for i in range(len(snp_data.columns))], list(snp_data.columns)))
genotype_ = df({
'sample_id': list(genotype.sample_id),
'location' : list(genotype.location),
'snps_id' : [list(snp_dict.keys()) for i in range(len(genotype))],
'snps': snp_data.values.tolist()
})
phenotype = csv("data/RiceToolkit/app-master/data/Y.csv")
phenotype.rename(columns={'sample_index':'sample_id', 'yield':'rice_yield'}, inplace=True)
phenotype.location.replace(all_loc, inplace=True)
phenotype = phenotype[phenotype.rice_yield!=0] # total zero values: 10
figure(figsize=(8, 2))
boxplot(phenotype.rice_yield, vert=False)
# savefig("boxplot.png")
q1, q3 = phenotype.rice_yield.quantile(0.25), phenotype.rice_yield.quantile(0.75)
iqr = q3 - q1 # Interquartile Range (IQR)
lif = q1 - (1.5 * iqr) # lower Inner Fence (LIF)
lof = q1 - (3 * iqr) # Lower Outer Fence (LOF)
uif = q3 + (1.5 * iqr) # Upper Inner Fence (UIF)
uof = q3 + (3 * iqr) # Upper Outer Fence (UOF)
glob_mean = mean(phenotype.loc[(phenotype.rice_yield >= lif) & (phenotype.rice_yield <= uif)].rice_yield)
mild_outlier = phenotype[((phenotype.rice_yield > uif) & (phenotype.rice_yield <= uof)) | ((phenotype.rice_yield < lif) & (phenotype.rice_yield >= lof))]
print("Total mild outlier(s): {}".format(len(mild_outlier)))
phenotype.loc[mild_outlier.index, "rice_yield"] = glob_mean
figure(figsize=(7.5, 5))
hlines(phenotype.rice_yield.describe()["mean"], -30, 730, color="k", linestyle="dashed", linewidth=3, label="mean")
hlines(q1, -30, 730, color="b", linestyle="dashed", linewidth=2, label="$Q_1$")
hlines(q3, -30, 730, color="r", linestyle="dashed", linewidth=2, label="$Q_3$")
scatter(list(phenotype.index), list(phenotype.rice_yield), c="xkcd:aquamarine")
scatter(list(mild_outlier.index), list(mild_outlier.rice_yield), c="xkcd:orange red")
legend()
title("Detected outliers in Indonesian rice yield dataset")
xlabel("Total samples")
ylabel("Yield (ton/ha)")
savefig("result/outliers.png", bbox_inches='tight', dpi=1000)
extreme_outlier = phenotype[(phenotype.rice_yield > uof) | (phenotype.rice_yield < lof)]
print("Total extreme outlier(s): {}".format(len(extreme_outlier)))
phenotype.loc[extreme_outlier.index, "rice_yield"] = glob_mean
extreme_outlier
# Yield distribution after outlier imputation
scatter(list(phenotype.index), list(phenotype.rice_yield))
sample = csv("data/raw-rice-data/ind-rg-samples.csv")
sample.drop(["Unnamed: 0", "sentrixposition", "id_source", "id_reg", "remarks"], axis=1, inplace=True)
print("Missing samples: {}".format(len(sample) - len(set(phenotype.sample_id.tolist()))))
def rename(inp, out, *args, **kwargs):
sample.name.replace(inp, out, inplace=True)
rename("37--Bio110-BC-Pir4", "37--Bio110-BC-Pir4 (BIOSA)")
rename("A1 / B1 (IR58025B)", "IR58025 A(CMS)-B(Maintener)")
rename("A2 / B2 (IR62829B)", "IR62829 A(CMS)-B(Maintener)")
rename("A3 / B3 (IR68885B)", "IR68885 A(CMS)-B(Maintener)")
rename("A4 / B4 (IR68886B)", "IR68886 A(CMS)-B(Maintener)")
rename("A5 / B5 (IR68888B)", "IR68888 A(CMS)-B(Maintener)")
rename("A6 / B6 (IR68897B)", "IR68897 A(CMS)-B(Maintener)")
rename("Ciherang-Sub1", "Ciherang + Sub1")
rename("IR 64 (kontrol indica))", "IR 64 (kontrol indica)")
rename("IR72a", "IR35366 (IR72)")
rename("Kinamaze (kontrol japonica)", "Kinamaze ")
rename("O. barthii 104384", "O. barthii ")
rename("O. glaberima 100156", "O. glaberima ")
rename("O. glaberima 10194", "O. glaberima")
rename("PK12 (S4325D-1-2-3-1)", "S4325D-1-2-3-1")
rename("PK21 (BP51-1)", "BP51-1")
rename("R14 (IR40750-82-2-2-3)", "IR40750-82-2-2-3")
rename("R2 (IR53942)", "IR53942")
rename("R3 (MTU53942)", "MTU 9992")
rename("R32 (BR158-2B-23)", "BR 168-2B-23")
rename("RH", "R<NAME> (Acc. No. 11730)")
rename("SWAR2", "Swarnalata2")
sample_idx = dict(zip(list(map(lambda x: x+1, list(sample.index))), list(sample.name)))
missing_samples_key = set(sample["index"].tolist()) - set(phenotype.sample_id.tolist())
missing_samples_name = {k: sample_idx[k] for k in missing_samples_key if k in sample_idx}
gp_table = merge(genotype_, phenotype, how="inner")
gp_table.rename(columns={'sample_id':'sample_name'}, inplace=True)
gp_table.insert(0, "sample_id", gp_table.sample_name)
gp_table.sample_name.replace(sample_idx, inplace=True)
gp_table.rice_yield.describe()
# Recast GP Table to fit the Statsmodels parameter
gp_table_2 = snp_data.loc[list(gp_table.sample_id)]
gp_table_2 = gp_table_2.reset_index()
gp_table_2.drop(columns="index", inplace=True)
gp_table_2.loc[:, "location"] = gp_table.location
gp_table_2.loc[:, "variety"] = gp_table.sample_id
gp_table_2.loc[:, "rice_yield"] = gp_table.rice_yield
gp_table.rice_yield.describe()
# Advanced Data Description
# * Location
# * Total sample
# * Desc stats
# * Skewness coef.
# * Kurtosis coef.
kuningan = gp_table[gp_table.location==0]
subang = gp_table[gp_table.location==1]
citayam = gp_table[gp_table.location==2]
def plot_dist(data, save=False, save_name="", *args, **kwargs):
figure(figsize=(8, 5))
N, bins, patches = hist(data["rice_yield"], 20, density=True, edgecolor="white")
jet = get_cmap('jet', len(patches))
kdeplot(data["rice_yield"], color="k", lw=1.5)
print("skewness coef.\t {}".format(skew(data["rice_yield"])))
print("kurtosis coef.\t {}".format(kurtosis(data["rice_yield"])))
print("jarque bera test stats.\t {}".format(jarque_bera(data["rice_yield"]).statistic))
print("jarque bera pvalue\t {}".format(jarque_bera(data["rice_yield"]).pvalue))
print(data["rice_yield"].describe())
for i in range(len(patches)):
patches[i].set_facecolor(jet(i))
if save==True:
savefig("result/rice_yield_distplot_{}.png".format(save_name), bbox_inches='tight', dpi=2000)
plot_dist(gp_table_2, True, "all")
plot_dist(kuningan, True, "kuningan")
plot_dist(subang, True, "subang")
plot_dist(citayam, True, "citayam")���� | GenotypePhenotypeTable/GP Table (outliers detection and distribution plot).py | from pandas import read_csv as csv, DataFrame as df, merge
from matplotlib.pyplot import figure, scatter, boxplot, savefig, hist, get_cmap, text, title, xlabel, ylabel, hlines, legend
from seaborn import kdeplot
from seaborn import heatmap as hm
from warnings import filterwarnings as fw
from numpy import mean
from scipy.stats import kurtosis, skew, jarque_bera
fw('ignore')
all_loc = {"Kuningan" : 0, "Subang" : 1, "Citayam" : 2}
genotype = csv("data/RiceToolkit/app-master/data/X.csv")
genotype.rename(columns={'sample_index':'sample_id'}, inplace=True)
genotype.location.replace(all_loc, inplace=True)
snp_data = genotype[genotype.columns[2:]]
for i in list(snp_data.columns):
snp_data.loc[:, i] = snp_data[i].apply(lambda x: round(x), snp_data[i].tolist())
snp_data
figure(figsize=(10, 10))
hm(snp_data.corr(), cmap="viridis")
# savefig("result/snp_corr_heatmap.png", bbox_inches='tight', dpi=1200)
snp_dict = dict(zip([i for i in range(len(snp_data.columns))], list(snp_data.columns)))
genotype_ = df({
'sample_id': list(genotype.sample_id),
'location' : list(genotype.location),
'snps_id' : [list(snp_dict.keys()) for i in range(len(genotype))],
'snps': snp_data.values.tolist()
})
phenotype = csv("data/RiceToolkit/app-master/data/Y.csv")
phenotype.rename(columns={'sample_index':'sample_id', 'yield':'rice_yield'}, inplace=True)
phenotype.location.replace(all_loc, inplace=True)
phenotype = phenotype[phenotype.rice_yield!=0] # total zero values: 10
figure(figsize=(8, 2))
boxplot(phenotype.rice_yield, vert=False)
# savefig("boxplot.png")
q1, q3 = phenotype.rice_yield.quantile(0.25), phenotype.rice_yield.quantile(0.75)
iqr = q3 - q1 # Interquartile Range (IQR)
lif = q1 - (1.5 * iqr) # lower Inner Fence (LIF)
lof = q1 - (3 * iqr) # Lower Outer Fence (LOF)
uif = q3 + (1.5 * iqr) # Upper Inner Fence (UIF)
uof = q3 + (3 * iqr) # Upper Outer Fence (UOF)
glob_mean = mean(phenotype.loc[(phenotype.rice_yield >= lif) & (phenotype.rice_yield <= uif)].rice_yield)
mild_outlier = phenotype[((phenotype.rice_yield > uif) & (phenotype.rice_yield <= uof)) | ((phenotype.rice_yield < lif) & (phenotype.rice_yield >= lof))]
print("Total mild outlier(s): {}".format(len(mild_outlier)))
phenotype.loc[mild_outlier.index, "rice_yield"] = glob_mean
figure(figsize=(7.5, 5))
hlines(phenotype.rice_yield.describe()["mean"], -30, 730, color="k", linestyle="dashed", linewidth=3, label="mean")
hlines(q1, -30, 730, color="b", linestyle="dashed", linewidth=2, label="$Q_1$")
hlines(q3, -30, 730, color="r", linestyle="dashed", linewidth=2, label="$Q_3$")
scatter(list(phenotype.index), list(phenotype.rice_yield), c="xkcd:aquamarine")
scatter(list(mild_outlier.index), list(mild_outlier.rice_yield), c="xkcd:orange red")
legend()
title("Detected outliers in Indonesian rice yield dataset")
xlabel("Total samples")
ylabel("Yield (ton/ha)")
savefig("result/outliers.png", bbox_inches='tight', dpi=1000)
extreme_outlier = phenotype[(phenotype.rice_yield > uof) | (phenotype.rice_yield < lof)]
print("Total extreme outlier(s): {}".format(len(extreme_outlier)))
phenotype.loc[extreme_outlier.index, "rice_yield"] = glob_mean
extreme_outlier
# Yield distribution after outlier imputation
scatter(list(phenotype.index), list(phenotype.rice_yield))
sample = csv("data/raw-rice-data/ind-rg-samples.csv")
sample.drop(["Unnamed: 0", "sentrixposition", "id_source", "id_reg", "remarks"], axis=1, inplace=True)
print("Missing samples: {}".format(len(sample) - len(set(phenotype.sample_id.tolist()))))
def rename(inp, out, *args, **kwargs):
sample.name.replace(inp, out, inplace=True)
rename("37--Bio110-BC-Pir4", "37--Bio110-BC-Pir4 (BIOSA)")
rename("A1 / B1 (IR58025B)", "IR58025 A(CMS)-B(Maintener)")
rename("A2 / B2 (IR62829B)", "IR62829 A(CMS)-B(Maintener)")
rename("A3 / B3 (IR68885B)", "IR68885 A(CMS)-B(Maintener)")
rename("A4 / B4 (IR68886B)", "IR68886 A(CMS)-B(Maintener)")
rename("A5 / B5 (IR68888B)", "IR68888 A(CMS)-B(Maintener)")
rename("A6 / B6 (IR68897B)", "IR68897 A(CMS)-B(Maintener)")
rename("Ciherang-Sub1", "Ciherang + Sub1")
rename("IR 64 (kontrol indica))", "IR 64 (kontrol indica)")
rename("IR72a", "IR35366 (IR72)")
rename("Kinamaze (kontrol japonica)", "Kinamaze ")
rename("O. barthii 104384", "O. barthii ")
rename("O. glaberima 100156", "O. glaberima ")
rename("O. glaberima 10194", "O. glaberima")
rename("PK12 (S4325D-1-2-3-1)", "S4325D-1-2-3-1")
rename("PK21 (BP51-1)", "BP51-1")
rename("R14 (IR40750-82-2-2-3)", "IR40750-82-2-2-3")
rename("R2 (IR53942)", "IR53942")
rename("R3 (MTU53942)", "MTU 9992")
rename("R32 (BR158-2B-23)", "BR 168-2B-23")
rename("RH", "R<NAME> (Acc. No. 11730)")
rename("SWAR2", "Swarnalata2")
sample_idx = dict(zip(list(map(lambda x: x+1, list(sample.index))), list(sample.name)))
missing_samples_key = set(sample["index"].tolist()) - set(phenotype.sample_id.tolist())
missing_samples_name = {k: sample_idx[k] for k in missing_samples_key if k in sample_idx}
gp_table = merge(genotype_, phenotype, how="inner")
gp_table.rename(columns={'sample_id':'sample_name'}, inplace=True)
gp_table.insert(0, "sample_id", gp_table.sample_name)
gp_table.sample_name.replace(sample_idx, inplace=True)
gp_table.rice_yield.describe()
# Recast GP Table to fit the Statsmodels parameter
gp_table_2 = snp_data.loc[list(gp_table.sample_id)]
gp_table_2 = gp_table_2.reset_index()
gp_table_2.drop(columns="index", inplace=True)
gp_table_2.loc[:, "location"] = gp_table.location
gp_table_2.loc[:, "variety"] = gp_table.sample_id
gp_table_2.loc[:, "rice_yield"] = gp_table.rice_yield
gp_table.rice_yield.describe()
# Advanced Data Description
# * Location
# * Total sample
# * Desc stats
# * Skewness coef.
# * Kurtosis coef.
kuningan = gp_table[gp_table.location==0]
subang = gp_table[gp_table.location==1]
citayam = gp_table[gp_table.location==2]
def plot_dist(data, save=False, save_name="", *args, **kwargs):
figure(figsize=(8, 5))
N, bins, patches = hist(data["rice_yield"], 20, density=True, edgecolor="white")
jet = get_cmap('jet', len(patches))
kdeplot(data["rice_yield"], color="k", lw=1.5)
print("skewness coef.\t {}".format(skew(data["rice_yield"])))
print("kurtosis coef.\t {}".format(kurtosis(data["rice_yield"])))
print("jarque bera test stats.\t {}".format(jarque_bera(data["rice_yield"]).statistic))
print("jarque bera pvalue\t {}".format(jarque_bera(data["rice_yield"]).pvalue))
print(data["rice_yield"].describe())
for i in range(len(patches)):
patches[i].set_facecolor(jet(i))
if save==True:
savefig("result/rice_yield_distplot_{}.png".format(save_name), bbox_inches='tight', dpi=2000)
plot_dist(gp_table_2, True, "all")
plot_dist(kuningan, True, "kuningan")
plot_dist(subang, True, "subang")
plot_dist(citayam, True, "citayam")���� | 0.330363 | 0.233876 |
def owner_only(func):
"Decorator for owner-only command methods."
func.owner_only = True
return func
def director_only(func):
"Decorator for director-only command methods."
func.director_only = True
return func
def coder_only(func):
"Decorator for coder-only command methods."
func.coder_only = True
return func
def admin_only(func):
"Decorator for admin-only command methods."
func.admin_only = True
return func
def mod_only(func):
"Decorator for mod-only command methods."
func.mod_only = True
return func
def member_only(func):
"Decorator for member-only command methods."
func.member_only = True
return func
def worldowner_only(func):
"Decorator for worldowner-only command methods."
func.worldowner_only = True
return func
def op_only(func):
"Decorator for op-only command methods."
func.op_only = True
return func
def builder_only(func):
"Decorator for builder-only command methods."
func.builder_only = True
return func
def unsilenced_only(func):
"Decorator for unsilenced-only command methods."
func.unsilenced_only = True
return func
def build_list(func):
"Decorator for build-list category methods."
func.build_list = True
return func
def world_list(func):
"Decorator for world-list category methods."
func.world_list = True
return func
def player_list(func):
"Decorator for player-list category methods."
func.player_list = True
return func
def info_list(func):
"Decorator for info-list category methods."
func.info_list = True
return func
def username_command(func):
"Decorator for commands that accept a single username parameter, and need a Client"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a username.")
else:
user = self.client.msgfindUserPartial(parts[1])
if user != None:
if len(parts) > 2:
try:
func(self, user, fromloc, overriderank, parts[2:])
except:
self.client.sendServerMessage("You specificed too many arguments.")
else:
func(self, user, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
def only_string_command(string_name):
def only_inner(func):
"Decorator for commands that accept a single username/plugin/etc parameter, and don't need it checked"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a %s." % string_name)
else:
username = parts[1].lower()
func(self, username, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
return only_inner
only_username_command = only_string_command("username")
def only_partialusername_command(func):
"Decorator for commands that accept only a username, which can be just part of a full name"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a username.")
else:
name = parts[1].lower()
# Try to match as a full name first.
if name not in self.client.factory.usernames:
# Build a list of any partial matches.
matches = []
for username in self.client.factory.usernames:
if name in username:
matches.append(username)
if len(matches)==0:
self.client.sendServerMessage("No such user '%s' (3+ chars?)" % name)
return
elif len(matches) > 1:
self.client.sendServerMessage("'%s' matches multiple users. Be more specific." % name)
return
else:
name = matches[0]
func(self, name, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
def username_world_command(func):
"Decorator for commands that accept a single username parameter and possibly a world name."
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a username.")
else:
username = parts[1].lower()
if len(parts) == 3:
try:
world = self.client.factory.worlds[parts[2].lower()]
except KeyError:
self.client.sendServerMessage("Unknown world '%s'." % parts[2].lower())
return
else:
world = self.client.world
func(self, username, world, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
def on_off_command(func):
"Decorator for commands that accept a single on/off parameter"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please use '%s on' or '%s off'." % (parts[0], parts[0]))
else:
if parts[1].lower() not in ["on", "off"]:
self.client.sendServerMessage("Use 'on' or 'off', not '%s'" % parts[1])
else:
func(self, parts[1].lower(), fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner | core/decorators.py | def owner_only(func):
"Decorator for owner-only command methods."
func.owner_only = True
return func
def director_only(func):
"Decorator for director-only command methods."
func.director_only = True
return func
def coder_only(func):
"Decorator for coder-only command methods."
func.coder_only = True
return func
def admin_only(func):
"Decorator for admin-only command methods."
func.admin_only = True
return func
def mod_only(func):
"Decorator for mod-only command methods."
func.mod_only = True
return func
def member_only(func):
"Decorator for member-only command methods."
func.member_only = True
return func
def worldowner_only(func):
"Decorator for worldowner-only command methods."
func.worldowner_only = True
return func
def op_only(func):
"Decorator for op-only command methods."
func.op_only = True
return func
def builder_only(func):
"Decorator for builder-only command methods."
func.builder_only = True
return func
def unsilenced_only(func):
"Decorator for unsilenced-only command methods."
func.unsilenced_only = True
return func
def build_list(func):
"Decorator for build-list category methods."
func.build_list = True
return func
def world_list(func):
"Decorator for world-list category methods."
func.world_list = True
return func
def player_list(func):
"Decorator for player-list category methods."
func.player_list = True
return func
def info_list(func):
"Decorator for info-list category methods."
func.info_list = True
return func
def username_command(func):
"Decorator for commands that accept a single username parameter, and need a Client"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a username.")
else:
user = self.client.msgfindUserPartial(parts[1])
if user != None:
if len(parts) > 2:
try:
func(self, user, fromloc, overriderank, parts[2:])
except:
self.client.sendServerMessage("You specificed too many arguments.")
else:
func(self, user, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
def only_string_command(string_name):
def only_inner(func):
"Decorator for commands that accept a single username/plugin/etc parameter, and don't need it checked"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a %s." % string_name)
else:
username = parts[1].lower()
func(self, username, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
return only_inner
only_username_command = only_string_command("username")
def only_partialusername_command(func):
"Decorator for commands that accept only a username, which can be just part of a full name"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a username.")
else:
name = parts[1].lower()
# Try to match as a full name first.
if name not in self.client.factory.usernames:
# Build a list of any partial matches.
matches = []
for username in self.client.factory.usernames:
if name in username:
matches.append(username)
if len(matches)==0:
self.client.sendServerMessage("No such user '%s' (3+ chars?)" % name)
return
elif len(matches) > 1:
self.client.sendServerMessage("'%s' matches multiple users. Be more specific." % name)
return
else:
name = matches[0]
func(self, name, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
def username_world_command(func):
"Decorator for commands that accept a single username parameter and possibly a world name."
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please specify a username.")
else:
username = parts[1].lower()
if len(parts) == 3:
try:
world = self.client.factory.worlds[parts[2].lower()]
except KeyError:
self.client.sendServerMessage("Unknown world '%s'." % parts[2].lower())
return
else:
world = self.client.world
func(self, username, world, fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner
def on_off_command(func):
"Decorator for commands that accept a single on/off parameter"
def inner(self, parts, fromloc, overriderank):
if len(parts) == 1:
self.client.sendServerMessage("Please use '%s on' or '%s off'." % (parts[0], parts[0]))
else:
if parts[1].lower() not in ["on", "off"]:
self.client.sendServerMessage("Use 'on' or 'off', not '%s'" % parts[1])
else:
func(self, parts[1].lower(), fromloc, overriderank)
inner.__doc__ = func.__doc__
return inner | 0.405449 | 0.094678 |
import pandas as pd
import seaborn as sns
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import os
import re
SAMPLE_FILE = snakemake.input[0]
EXTRACTION_PROFILE = snakemake.input[1]
POPCLUSTERING_FILE = snakemake.input[2]
TARGETS_FILE = snakemake.input[3]
OUT_DIR = snakemake.params.dir
# threshold for reporting samples with low number of reads:
# proportion of mean totalMatching reads
PROP_READ_COUNT = 0.1
# threshold for reporting samples with
# low proportion of reads retained after filtering
PROP_RETAINED = 0.1
def plot_heatmap(data, title, fname, **kwargs):
if 'cmap' not in kwargs:
kwargs['cmap']='coolwarm_r'
# grid_kws = {'width_ratios': (0.9, 0.03), 'wspace': 0.18}
# fig, (ax, cbar_ax) = plt.subplots(1, 2, gridspec_kw=grid_kws, figsize=(18,10))
fig_width = data.shape[1] / 4 # samples
fig_height = data.shape[0] / 5 # targets
fig, ax = plt.subplots(1, 1, figsize=(fig_width, fig_height))
sns.heatmap(data,
ax=ax,
**kwargs)
ax.set_title(title)
plt.tight_layout()
plt.savefig(os.path.join(OUT_DIR, fname), dpi=150)
def msg(msg):
with open(os.path.join(OUT_DIR, 'summary.txt'),
mode='a') as o:
o.write(msg + '\n')
# sample metadata
sample_meta = pd.read_csv(SAMPLE_FILE, dtype={'Source_sample':'str'})
# SeekDeep extraction profile
extr_data = pd.read_csv(EXTRACTION_PROFILE)
# merged SeekDeep popClustering tables
pop_data = pd.read_csv(POPCLUSTERING_FILE)
# read counts prep
read_per_sample = extr_data.groupby(by=['s_Sample', 'target']).sum()
read_per_sample['final'] = pop_data.groupby(by=['s_Sample', 'target'])['c_ReadCnt'].sum()
read_per_sample['final'] = read_per_sample['final'].fillna(0).astype(int)
read_per_sample['failedClustering'] = read_per_sample.good - read_per_sample.final
read_per_sample.drop(columns=['good', 'bad'], inplace=True)
# all samples and targets in the experiment
all_samples = sample_meta['Source_sample'].unique()
all_targets = pd.read_csv(TARGETS_FILE, sep='\t', dtype='str')['target'].unique()
def fill_sample_target(df, all_samples=None, all_targets=None):
    '''
    Add NA-only rows and columns to the pandas.DataFrame where
    samples are columns and targets are rows, so that every expected
    sample/target combination is present; both axes are returned sorted.

    Parameters
    ----------
    df : pandas.DataFrame
        Pivoted table: targets as the index, samples as the columns.
        Its axis labels are normalised to str in place.
    all_samples, all_targets : iterable of str, optional
        Expected sample / target label sets.  Default to the
        experiment-wide module-level lists of the same names.
    '''
    if all_samples is None:
        all_samples = globals()['all_samples']  # module-level experiment list
    if all_targets is None:
        all_targets = globals()['all_targets']
    # normalise labels to str so they compare equal to the expected lists
    df.columns = df.columns.map(str)
    df.index = df.index.map(str)
    # reindex() inserts every missing row/column as all-NaN in one step;
    # the per-label DataFrame.append() used previously was deprecated in
    # pandas 1.4 and removed in pandas 2.0.
    full_cols = sorted(set(df.columns).union(map(str, all_samples)))
    full_rows = sorted(set(df.index).union(map(str, all_targets)))
    return df.reindex(index=full_rows, columns=full_cols)
# initial merged read counts (i.e., matching at least one primer)
total_reads = read_per_sample \
    .reset_index() \
    .pivot(index='target', columns='s_Sample', values='totalMatching')
plot_heatmap(np.log10(fill_sample_target(total_reads)),
             title="Initial reads per sample per target (log10)",
             fname="reads_initial.pdf")
# success rate indicates percentages of losses during all stages of extraction and clustering
success_rate = (read_per_sample.final / read_per_sample.totalMatching) \
    .reset_index() \
    .pivot(index='target', columns='s_Sample', values=0)
plot_heatmap(fill_sample_target(success_rate),
             title="Proportion of reads passing all filters per sample per target",
             fname="filter_rate.pdf",
             center=0.5)
# final read counts per sample per target
final_reads = read_per_sample.reset_index() \
    .pivot(index='target', columns='s_Sample', values='final')
# replace zeroes with small number for logscale conversion;
# log10(0.009) ~ -2.05, below any real count, so zeros land at the red end
final_reads = final_reads.replace(0, 0.009)
# resulting colours:
# - NaN - no reads initially, white
# - 0.009 - all reads removed, red
# - >=1 - some reads retained, grey to blue
plot_heatmap(np.log10(fill_sample_target(final_reads)),
             title="Final reads per sample per target (log10); red - all reads removed",
             fname="reads_final.pdf",
             center=0)
# per-amplicon failure reasons: sum counts over samples, then express each
# category as a proportion ('*_pc') of the initial primer-matched reads
ampl_data = read_per_sample.groupby(by='target').sum()
for col in ampl_data.drop(columns='totalMatching'):
    ampl_data[col+'_pc'] = ampl_data[col]/ampl_data['totalMatching']
fig, ax = plt.subplots(1, 1, figsize=(18, 3))
# stacked bars: one bar per amplicon, segments are read-fate proportions
ampl_data[['final_pc',
           'failedClustering_pc',
           'failedQuality_pc',
           'failedPairProcessing_pc',
           'failedMinLen_pc',
           'failedMaxLen_pc',
           'failedNs_pc',
           'failedPossibleContamination_pc']] \
    .plot(kind='bar',
          stacked=True,
          ax=ax,
          title='Read status breakdown per amplicon')
plt.tight_layout()
plt.savefig(os.path.join(OUT_DIR, "filter_per_amplicon.pdf"), dpi=150)
# per-sample failure reasons, as proportions of that sample's initial reads
sample_data = read_per_sample.groupby(by='s_Sample').sum()
for col in sample_data.drop(columns='totalMatching'):
    sample_data[col+'_pc'] = sample_data[col]/sample_data['totalMatching']
# split into two panes assuming about 100 samples per batch
split_idx = sample_data.shape[0]//2
d1 = sample_data.iloc[:split_idx, :]
d2 = sample_data.iloc[split_idx:, :]
# plot - size optimized for 96 samples
fig, axs = plt.subplots(2,1,figsize=(12, 8))
for (i,d) in enumerate([d1, d2]):
    # legend and title only on the top pane
    axs[i] = d[['final_pc',
                'failedClustering_pc',
                'failedQuality_pc',
                'failedPairProcessing_pc',
                'failedMinLen_pc',
                'failedMaxLen_pc',
                'failedNs_pc',
                'failedPossibleContamination_pc']] \
        .plot(kind='bar',
              ax=axs[i],
              legend=(True if i==0 else False),
              stacked=True,
              title=('Read status breakdown per sample' if i==0 else ''))
plt.tight_layout()
plt.savefig(os.path.join(OUT_DIR, "filter_per_sample.pdf"), dpi=150)
# text summary written via msg() to OUT_DIR/summary.txt
# read counts: flag samples whose primer-matched read count falls below
# PROP_READ_COUNT of the across-sample mean
low_yield_cutoff = sample_data.totalMatching.mean() * PROP_READ_COUNT
low_yield_samples = sample_data.loc[sample_data.totalMatching <= low_yield_cutoff,
                                    ['totalMatching']]
msg('Samples with low number of reads matching primers. Cutoff: {}'.format(low_yield_cutoff))
msg(low_yield_samples.to_string() + '\n')
# filtering: flag samples retaining at most PROP_RETAINED of their reads
low_retained_samples = sample_data.loc[sample_data.final_pc <= PROP_RETAINED,
                                       ['totalMatching', 'final', 'final_pc']]
msg('Samples with at most {} reads retained after filtering'.format(PROP_RETAINED))
msg(low_retained_samples.to_string() + '\n')
# allele counts (TODO - discrete color bar)
# number of distinct haplotypes (h_popUID rows) per (sample, target); 0 where none
allele_counts = pop_data.groupby(['s_Sample', 'target'], as_index=False).count()\
    .pivot(index='target', columns='s_Sample', values='h_popUID') \
    .fillna(0)
# +1 so the colour-bar tick range covers the maximum observed count
max_allele_count = allele_counts.max().max().astype(int) + 1
plot_heatmap(fill_sample_target(allele_counts),
             title="Alleles per sample per target",
             fname="allele_counts.pdf",
             cmap="coolwarm",
             center=2,  # presumably 2 alleles (diploid) is the neutral colour — TODO confirm
             cbar_kws=dict(ticks=range(max_allele_count)))
# major allele frequency: highest averaged fraction per (sample, target)
major_hap_freq = pop_data.groupby(['s_Sample', 'target'], as_index=False)['c_AveragedFrac'].max()\
    .pivot(index='target', columns='s_Sample', values='c_AveragedFrac')
# ignore perfectly homozygous samples
major_hap_freq = major_hap_freq.replace(1, np.nan)
# plot only if any heterozygous sites exist.
# BUGFIX: this guard previously used `~`, which is *bitwise* NOT — on a
# plain Python bool `~True == -2` and `~False == -1`, both truthy, so the
# condition could never be False when pandas returned a builtin bool.
# `not` is the correct logical negation for bool and numpy.bool_ alike.
if not major_hap_freq.isna().all(axis=None):
    plot_heatmap(fill_sample_target(major_hap_freq),
                 title="Major allele imbalance: red - over 0.5, blue - below 0.5",
                 fname="allele_imbalance.pdf",
                 cmap="coolwarm",
                 center=0.5)
else:
    print('No heterozygous calls')
# allele frequencies versus read counts: scatter of per-cluster allele
# fraction against log10 read count (alpha低 for overplotting is 0.1)
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
ax.scatter(x=pop_data.c_AveragedFrac, y=np.log10(pop_data.c_ReadCnt.astype(float)), alpha=0.1)
plt.xlabel('Allele fraction')
plt.ylabel('Read count, log10')
plt.savefig(os.path.join(OUT_DIR, "allele_freq_cov.pdf"), dpi=150)
import seaborn as sns
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import os
import re
SAMPLE_FILE = snakemake.input[0]
EXTRACTION_PROFILE = snakemake.input[1]
POPCLUSTERING_FILE = snakemake.input[2]
TARGETS_FILE = snakemake.input[3]
OUT_DIR = snakemake.params.dir
# threshold for reporting samples with low number of reads:
# proportion of mean totalMatching reads
PROP_READ_COUNT = 0.1
# threshold for reporting samples with
# low proportion of reads retained after filtering
PROP_RETAINED = 0.1
def plot_heatmap(data, title, fname, **kwargs):
    """Draw a seaborn heatmap of `data` and save it to OUT_DIR/fname.

    Figure size scales with the data: width with the number of columns
    (samples), height with the number of rows (targets).  Extra keyword
    arguments are forwarded to seaborn.heatmap; `cmap` defaults to
    'coolwarm_r' when the caller does not supply one.
    """
    kwargs.setdefault('cmap', 'coolwarm_r')
    fig_width = data.shape[1] / 4   # roughly 4 samples per inch
    fig_height = data.shape[0] / 5  # roughly 5 targets per inch
    fig, ax = plt.subplots(1, 1, figsize=(fig_width, fig_height))
    sns.heatmap(data,
                ax=ax,
                **kwargs)
    ax.set_title(title)
    plt.tight_layout()
    plt.savefig(os.path.join(OUT_DIR, fname), dpi=150)
    # Close the figure: this helper is called many times in one run, and
    # unclosed figures accumulate (matplotlib warns and holds memory).
    plt.close(fig)
def msg(msg):
    """Append one line of text to the run summary file OUT_DIR/summary.txt."""
    summary_path = os.path.join(OUT_DIR, 'summary.txt')
    with open(summary_path, mode='a') as handle:
        handle.write(msg + '\n')
# sample metadata
sample_meta = pd.read_csv(SAMPLE_FILE, dtype={'Source_sample':'str'})
# SeekDeep extraction profile
extr_data = pd.read_csv(EXTRACTION_PROFILE)
# merged SeekDeep popClustering tables
pop_data = pd.read_csv(POPCLUSTERING_FILE)
# read counts prep
read_per_sample = extr_data.groupby(by=['s_Sample', 'target']).sum()
read_per_sample['final'] = pop_data.groupby(by=['s_Sample', 'target'])['c_ReadCnt'].sum()
read_per_sample['final'] = read_per_sample['final'].fillna(0).astype(int)
read_per_sample['failedClustering'] = read_per_sample.good - read_per_sample.final
read_per_sample.drop(columns=['good', 'bad'], inplace=True)
# all samples and targets in the experiment
all_samples = sample_meta['Source_sample'].unique()
all_targets = pd.read_csv(TARGETS_FILE, sep='\t', dtype='str')['target'].unique()
def fill_sample_target(df, all_samples=None, all_targets=None):
    '''
    Add NA-only rows and columns to the pandas.DataFrame where
    samples are columns and targets are rows, so that every expected
    sample/target combination is present; both axes are returned sorted.

    Parameters
    ----------
    df : pandas.DataFrame
        Pivoted table: targets as the index, samples as the columns.
        Its axis labels are normalised to str in place.
    all_samples, all_targets : iterable of str, optional
        Expected sample / target label sets.  Default to the
        experiment-wide module-level lists of the same names.
    '''
    if all_samples is None:
        all_samples = globals()['all_samples']  # module-level experiment list
    if all_targets is None:
        all_targets = globals()['all_targets']
    # normalise labels to str so they compare equal to the expected lists
    df.columns = df.columns.map(str)
    df.index = df.index.map(str)
    # reindex() inserts every missing row/column as all-NaN in one step;
    # the per-label DataFrame.append() used previously was deprecated in
    # pandas 1.4 and removed in pandas 2.0.
    full_cols = sorted(set(df.columns).union(map(str, all_samples)))
    full_rows = sorted(set(df.index).union(map(str, all_targets)))
    return df.reindex(index=full_rows, columns=full_cols)
# initial merged read counts (i.e., matching at least one primer)
total_reads = read_per_sample \
.reset_index() \
.pivot(index='target', columns='s_Sample', values='totalMatching')
plot_heatmap(np.log10(fill_sample_target(total_reads)),
title="Initial reads per sample per target (log10)",
fname="reads_initial.pdf")
# sucess rate indicates percentages of losses during all stages of extraction and clustering
success_rate = (read_per_sample.final / read_per_sample.totalMatching) \
.reset_index() \
.pivot(index='target', columns='s_Sample', values=0)
plot_heatmap(fill_sample_target(success_rate),
title="Proportion of reads passing all filters per sample per target",
fname="filter_rate.pdf",
center=0.5)
# final read counts per sample per target
final_reads = read_per_sample.reset_index() \
.pivot(index='target', columns='s_Sample', values='final')
# replace zeroes with small number for logscale conversion
final_reads = final_reads.replace(0, 0.009)
# resulting colours:
# - NaN - no reads initially, white
# - 0.009 - all reads removed, red
# - >=1 - some reads retained, grey to blue
plot_heatmap(np.log10(fill_sample_target(final_reads)),
title="Final reads per sample per target (log10); red - all reads removed",
fname="reads_final.pdf",
center=0)
# per-amplicon failure reasons
ampl_data = read_per_sample.groupby(by='target').sum()
for col in ampl_data.drop(columns='totalMatching'):
ampl_data[col+'_pc'] = ampl_data[col]/ampl_data['totalMatching']
fig, ax = plt.subplots(1, 1, figsize=(18, 3))
ampl_data[['final_pc',
'failedClustering_pc',
'failedQuality_pc',
'failedPairProcessing_pc',
'failedMinLen_pc',
'failedMaxLen_pc',
'failedNs_pc',
'failedPossibleContamination_pc']] \
.plot(kind='bar',
stacked=True,
ax=ax,
title='Read status breakdown per amplicon')
plt.tight_layout()
plt.savefig(os.path.join(OUT_DIR, "filter_per_amplicon.pdf"), dpi=150)
# per-sample failure reasons
sample_data = read_per_sample.groupby(by='s_Sample').sum()
for col in sample_data.drop(columns='totalMatching'):
sample_data[col+'_pc'] = sample_data[col]/sample_data['totalMatching']
# split into two panes assuming about 100 samples per batch
split_idx = sample_data.shape[0]//2
d1 = sample_data.iloc[:split_idx, :]
d2 = sample_data.iloc[split_idx:, :]
# plot - size optimized for 96 samples
fig, axs = plt.subplots(2,1,figsize=(12, 8))
for (i,d) in enumerate([d1, d2]):
axs[i] = d[['final_pc',
'failedClustering_pc',
'failedQuality_pc',
'failedPairProcessing_pc',
'failedMinLen_pc',
'failedMaxLen_pc',
'failedNs_pc',
'failedPossibleContamination_pc']] \
.plot(kind='bar',
ax=axs[i],
legend=(True if i==0 else False),
stacked=True,
title=('Read status breakdown per sample' if i==0 else ''))
plt.tight_layout()
plt.savefig(os.path.join(OUT_DIR, "filter_per_sample.pdf"), dpi=150)
# text summary
# read counts
low_yield_cutoff = sample_data.totalMatching.mean() * PROP_READ_COUNT
low_yield_samples = sample_data.loc[sample_data.totalMatching <= low_yield_cutoff,
['totalMatching']]
msg('Samples with low number of reads matching primers. Cutoff: {}'.format(low_yield_cutoff))
msg(low_yield_samples.to_string() + '\n')
# filtering
low_retained_samples = sample_data.loc[sample_data.final_pc <= PROP_RETAINED,
['totalMatching', 'final', 'final_pc']]
msg('Samples with at most {} reads retained after filtering'.format(PROP_RETAINED))
msg(low_retained_samples.to_string() + '\n')
# allele counts (TODO - discrete color bar)
allele_counts = pop_data.groupby(['s_Sample', 'target'], as_index=False).count()\
.pivot(index='target', columns='s_Sample', values='h_popUID') \
.fillna(0)
max_allele_count = allele_counts.max().max().astype(int) + 1
plot_heatmap(fill_sample_target(allele_counts),
title="Alleles per sample per target",
fname="allele_counts.pdf",
cmap="coolwarm",
center=2,
cbar_kws=dict(ticks=range(max_allele_count)))
# major allele frequency: highest averaged fraction per (sample, target)
major_hap_freq = pop_data.groupby(['s_Sample', 'target'], as_index=False)['c_AveragedFrac'].max()\
    .pivot(index='target', columns='s_Sample', values='c_AveragedFrac')
# ignore perfectly homozygous samples
major_hap_freq = major_hap_freq.replace(1, np.nan)
# plot only if any heterozygous sites exist.
# BUGFIX: this guard previously used `~`, which is *bitwise* NOT — on a
# plain Python bool `~True == -2` and `~False == -1`, both truthy, so the
# condition could never be False when pandas returned a builtin bool.
# `not` is the correct logical negation for bool and numpy.bool_ alike.
if not major_hap_freq.isna().all(axis=None):
    plot_heatmap(fill_sample_target(major_hap_freq),
                 title="Major allele imbalance: red - over 0.5, blue - below 0.5",
                 fname="allele_imbalance.pdf",
                 cmap="coolwarm",
                 center=0.5)
else:
    print('No heterozygous calls')
# allele frequencies versus read counts
fig, ax = plt.subplots(1, 1, figsize=(6, 4))
ax.scatter(x=pop_data.c_AveragedFrac, y=np.log10(pop_data.c_ReadCnt.astype(float)), alpha=0.1)
plt.xlabel('Allele fraction')
plt.ylabel('Read count, log10')
plt.savefig(os.path.join(OUT_DIR, "allele_freq_cov.pdf"), dpi=150) | 0.515864 | 0.371165 |
import numpy as np
import pickle
import os, sys, argparse
from util.tables import *
from collections import defaultdict
import pandas as pd
# directory containing pickled per-run result tables
path = f'./results/regression'
_, _, filenames = next(os.walk(path))
# methods compared; each has a vanilla and a '-RGPR' variant
method_types = ['LLL', 'KFL', 'SWAG', 'SVGP']
rgpr_suffix = '-RGPR'
datasets = ['boston_housing', 'concrete', 'energy', 'wine']
# row label used for the out-of-distribution ("far away") column
FARAWAY = 'FarAway'
TEXTBF = '\\textbf'
# ========================== Error-bars ====================================
# Load the per-run predictive-std pickles for each dataset, average over
# runs, and emit LaTeX table rows comparing each method with its -RGPR
# counterpart.  For the FarAway column the larger value is bolded.
values = defaultdict(list)
for dset in datasets:
    stds = []

    def cond(fname, marker):
        # match pickles belonging to this dataset that contain `marker`
        # (parameter renamed from `str`, which shadowed the builtin)
        return f'_{dset.lower()}_' in fname and marker in fname

    for fname in [fname for fname in filenames if cond(fname, '_std_')]:
        with open(f'{path}/{fname}', 'rb') as f:
            d = pickle.load(f)
        stds.append(pd.DataFrame(d))
    df_std = pd.concat(stds, ignore_index=False)
    df_std_mean = df_std.groupby(df_std.index).mean()
    for method_type in method_types:
        mean_vanilla = df_std_mean[method_type][dset]
        mean_rgp = df_std_mean[method_type+rgpr_suffix][dset]
        # bolding deliberately disabled for the in-distribution columns
        bold_vanilla = False
        bold_rgp = False
        str_vanilla = f'\\textbf{{{mean_vanilla:.3f}}}' if bold_vanilla else f'{mean_vanilla:.3f}'
        str_rgp = f'\\textbf{{{mean_rgp:.3f}}}' if bold_rgp else f'{mean_rgp:.3f}'
        values[method_type].append(str_vanilla)
        values[method_type+rgpr_suffix].append(str_rgp)
    # FarAway column: bold whichever of the two values is larger
    for method_type in method_types:
        mean_vanilla = df_std_mean[method_type][FARAWAY]
        mean_rgp = df_std_mean[method_type+rgpr_suffix][FARAWAY]
        bold_vanilla = mean_vanilla >= mean_rgp
        bold_rgp = mean_rgp >= mean_vanilla
        str_vanilla = f'\\textbf{{{mean_vanilla:.3f}}}' if bold_vanilla else f'{mean_vanilla:.3f}'
        str_rgp = f'\\textbf{{{mean_rgp:.3f}}}' if bold_rgp else f'{mean_rgp:.3f}'
        values[method_type].append(str_vanilla)
        values[method_type+rgpr_suffix].append(str_rgp)
print()
# print LaTeX rows; a \midrule separates every vanilla/RGPR pair
for i, method_type in enumerate(values.keys()):
    if i % 2 == 0 and i > 0:
        print()
        print('\\midrule')
        print()
    latex_str = f'{method_type} & {" & ".join(values[method_type])} \\\\'
    print(latex_str)
print()
print("==================================================================================")
print()
# ========================== RMSE ====================================
# Average test RMSE per method over runs; bold a method when its mean is
# within one (rounded) std of the other; emit LaTeX rows "mean$\pm$std".
values = defaultdict(list)
for dset in datasets:
    rmses = []

    def cond(fname, marker):
        # match pickles belonging to this dataset that contain `marker`
        # (parameter renamed from `str`, which shadowed the builtin)
        return dset.lower() in fname and marker in fname

    for fname in [fname for fname in filenames if cond(fname, '_rmse_')]:
        with open(f'{path}/{fname}', 'rb') as f:
            d = pickle.load(f)
        rmses.append(pd.DataFrame([d]))
    df_rmse = pd.concat(rmses, ignore_index=False)
    df_rmse_mean = df_rmse.groupby(df_rmse.index).mean()
    df_rmse_std = df_rmse.groupby(df_rmse.index).std()
    for method_type in method_types:
        mean_vanilla = df_rmse_mean[method_type][0]
        mean_rgp = df_rmse_mean[method_type+rgpr_suffix][0]
        std_vanilla = df_rmse_std[method_type][0]
        std_rgp = df_rmse_std[method_type+rgpr_suffix][0]
        bold_vanilla = mean_vanilla <= round(mean_rgp+std_rgp, 1)
        bold_rgp = mean_rgp <= round(mean_vanilla+std_vanilla, 1)
        str_vanilla = f'\\textbf{{{mean_vanilla:.3f}}}' if bold_vanilla else f'{mean_vanilla:.3f}'
        str_rgp = f'\\textbf{{{mean_rgp:.3f}}}' if bold_rgp else f'{mean_rgp:.3f}'
        # '\\pm' (was '\pm'): same runtime string, but avoids the
        # invalid-escape DeprecationWarning that becomes a SyntaxError
        # in future Python versions
        str_vanilla += f'$\\pm${std_vanilla:.3f}'
        str_rgp += f'$\\pm${std_rgp:.3f}'
        values[method_type].append(str_vanilla)
        values[method_type+rgpr_suffix].append(str_rgp)
print()
# print LaTeX rows; a \midrule separates every vanilla/RGPR pair
for i, method_type in enumerate(values.keys()):
    if i % 2 == 0 and i > 0:
        print()
        print('\\midrule')
        print()
    latex_str = f'{method_type} & {" & ".join(values[method_type])} \\\\'
    print(latex_str)
print()
import pickle
import os, sys, argparse
from util.tables import *
from collections import defaultdict
import pandas as pd
path = f'./results/regression'
_, _, filenames = next(os.walk(path))
method_types = ['LLL', 'KFL', 'SWAG', 'SVGP']
rgpr_suffix = '-RGPR'
datasets = ['boston_housing', 'concrete', 'energy', 'wine']
FARAWAY = 'FarAway'
TEXTBF = '\\textbf'
# ========================== Error-bars ====================================
# Load the per-run predictive-std pickles for each dataset, average over
# runs, and emit LaTeX table rows comparing each method with its -RGPR
# counterpart.  For the FarAway column the larger value is bolded.
values = defaultdict(list)
for dset in datasets:
    stds = []

    def cond(fname, marker):
        # match pickles belonging to this dataset that contain `marker`
        # (parameter renamed from `str`, which shadowed the builtin)
        return f'_{dset.lower()}_' in fname and marker in fname

    for fname in [fname for fname in filenames if cond(fname, '_std_')]:
        with open(f'{path}/{fname}', 'rb') as f:
            d = pickle.load(f)
        stds.append(pd.DataFrame(d))
    df_std = pd.concat(stds, ignore_index=False)
    df_std_mean = df_std.groupby(df_std.index).mean()
    for method_type in method_types:
        mean_vanilla = df_std_mean[method_type][dset]
        mean_rgp = df_std_mean[method_type+rgpr_suffix][dset]
        # bolding deliberately disabled for the in-distribution columns
        bold_vanilla = False
        bold_rgp = False
        str_vanilla = f'\\textbf{{{mean_vanilla:.3f}}}' if bold_vanilla else f'{mean_vanilla:.3f}'
        str_rgp = f'\\textbf{{{mean_rgp:.3f}}}' if bold_rgp else f'{mean_rgp:.3f}'
        values[method_type].append(str_vanilla)
        values[method_type+rgpr_suffix].append(str_rgp)
    # FarAway column: bold whichever of the two values is larger
    for method_type in method_types:
        mean_vanilla = df_std_mean[method_type][FARAWAY]
        mean_rgp = df_std_mean[method_type+rgpr_suffix][FARAWAY]
        bold_vanilla = mean_vanilla >= mean_rgp
        bold_rgp = mean_rgp >= mean_vanilla
        str_vanilla = f'\\textbf{{{mean_vanilla:.3f}}}' if bold_vanilla else f'{mean_vanilla:.3f}'
        str_rgp = f'\\textbf{{{mean_rgp:.3f}}}' if bold_rgp else f'{mean_rgp:.3f}'
        values[method_type].append(str_vanilla)
        values[method_type+rgpr_suffix].append(str_rgp)
print()
# print LaTeX rows; a \midrule separates every vanilla/RGPR pair
for i, method_type in enumerate(values.keys()):
    if i % 2 == 0 and i > 0:
        print()
        print('\\midrule')
        print()
    latex_str = f'{method_type} & {" & ".join(values[method_type])} \\\\'
    print(latex_str)
print()
print("==================================================================================")
print()
# ========================== RMSE ====================================
# Average test RMSE per method over runs; bold a method when its mean is
# within one (rounded) std of the other; emit LaTeX rows "mean$\pm$std".
values = defaultdict(list)
for dset in datasets:
    rmses = []

    def cond(fname, marker):
        # match pickles belonging to this dataset that contain `marker`
        # (parameter renamed from `str`, which shadowed the builtin)
        return dset.lower() in fname and marker in fname

    for fname in [fname for fname in filenames if cond(fname, '_rmse_')]:
        with open(f'{path}/{fname}', 'rb') as f:
            d = pickle.load(f)
        rmses.append(pd.DataFrame([d]))
    df_rmse = pd.concat(rmses, ignore_index=False)
    df_rmse_mean = df_rmse.groupby(df_rmse.index).mean()
    df_rmse_std = df_rmse.groupby(df_rmse.index).std()
    for method_type in method_types:
        mean_vanilla = df_rmse_mean[method_type][0]
        mean_rgp = df_rmse_mean[method_type+rgpr_suffix][0]
        std_vanilla = df_rmse_std[method_type][0]
        std_rgp = df_rmse_std[method_type+rgpr_suffix][0]
        bold_vanilla = mean_vanilla <= round(mean_rgp+std_rgp, 1)
        bold_rgp = mean_rgp <= round(mean_vanilla+std_vanilla, 1)
        str_vanilla = f'\\textbf{{{mean_vanilla:.3f}}}' if bold_vanilla else f'{mean_vanilla:.3f}'
        str_rgp = f'\\textbf{{{mean_rgp:.3f}}}' if bold_rgp else f'{mean_rgp:.3f}'
        # '\\pm' (was '\pm'): same runtime string, but avoids the
        # invalid-escape DeprecationWarning that becomes a SyntaxError
        # in future Python versions
        str_vanilla += f'$\\pm${std_vanilla:.3f}'
        str_rgp += f'$\\pm${std_rgp:.3f}'
        values[method_type].append(str_vanilla)
        values[method_type+rgpr_suffix].append(str_rgp)
print()
# print LaTeX rows; a \midrule separates every vanilla/RGPR pair
for i, method_type in enumerate(values.keys()):
    if i % 2 == 0 and i > 0:
        print()
        print('\\midrule')
        print()
    latex_str = f'{method_type} & {" & ".join(values[method_type])} \\\\'
    print(latex_str)
print()
import unittest
import winnow
from winnow.models.base import WinnowVersion
import json
from db import MockKVStore
# Baseline product document used by every test; merges below combine this
# with alternative option sets.
BASE_PRODUCT = {u"name": u"table",
                u"description": u"This is a very nice table",
                u"options":{
                    u"color": [u"red", u"green", u"blue"],
                    u"size": [u"big", u"small"],
                    u"tool": [u"cnc", u"laser"],
                    u"material": [u"wood", u"metal", u"plastic"]
                }
}
class TestMergeCreatesExceptionValue(unittest.TestCase):
    """Merging documents whose option sets conflict should produce an
    'exception' value for the conflicting key rather than failing."""

    def setUp(self):
        # fresh in-memory store and base version before each test
        self.db = MockKVStore()
        self.base_version = WinnowVersion.add_doc(self.db, BASE_PRODUCT, {})
    def test_does_a_merge(self):
        """Merging two docs whose 'size' options are disjoint records an
        'exception' value for that key and an entry in doc['errors']."""
        other_dict = {u"name": u"something",
                      u"description": u"these are other options",
                      u"options":{
                          u"color": [u"red", u"blue"],
                          u"size": [u"medium"],
                          u"tool": [u"cnc", u"laser", u"plaster"],
                          u"days": [u"tuesday", u"thursday"],
                          u"drinks": [u"beer", u"coffee"],
                          u"snacks": [u"crisps", u"cheese", u"apple"]
                      }
        }
        # NOTE(review): 'expected' is never asserted against below —
        # candidate for an explicit assertEqual or removal.
        expected = {u"name": u"table",
                    u"description": u"This is a very nice table",
                    u"options":{
                        u"color": [u"blue", u"red"],
                        u"size": [u"big", u"small"],
                        u"tool": [u"cnc", u"laser"],
                        u"material": [u"metal", u"plastic", u"wood"],
                        u"days": [u"thursday", u"tuesday"],
                        u"drinks": [u"beer", u"coffee"],
                        u"snacks": [u"apple", u"cheese", u"crisps"]
                    }
        }
        other_version = WinnowVersion.add_doc(self.db, other_dict, {})
        merged = WinnowVersion.merged(self.db, BASE_PRODUCT, {}, self.base_version, other_version)
        # disjoint option sets cannot be intersected -> an "exception" value
        size = merged.get_doc()["options"]["size"]
        self.assertTrue(isinstance(size, dict))
        self.assertEqual(size["type"],"exception")
        self.assertEqual(size["values"], [[u'big', u'small'], u'medium'])
        self.assertTrue("errors" in merged.get_doc())
        print json.dumps(merged.get_doc()["errors"], indent=4)
        # Shape of the expected error record (kept for reference; the
        # commented-out assertion below would compare against it).
        expected_error = [
            [
                {
                    "values": [
                        [
                            "big",
                            "small"
                        ],
                        "medium"
                    ],
                    "type": "exception",
                    "context": {
                        "source_b": {
                            "description": "these are other options",
                            "options": {
                                "snacks": [
                                    "crisps",
                                    "cheese",
                                    "apple"
                                ],
                                "color": [
                                    "red",
                                    "blue"
                                ],
                                "tool": [
                                    "cnc",
                                    "laser",
                                    "plaster"
                                ],
                                "days": [
                                    "tuesday",
                                    "thursday"
                                ],
                                "drinks": [
                                    "beer",
                                    "coffee"
                                ],
                                "size": [
                                    "medium"
                                ]
                            },
                            "name": "something"
                        },
                        "source_a": {
                            "description": "This is a very nice table",
                            "options": {
                                "color": [
                                    "red",
                                    "green",
                                    "blue"
                                ],
                                "tool": [
                                    "cnc",
                                    "laser"
                                ],
                                "material": [
                                    "wood",
                                    "metal",
                                    "plastic"
                                ],
                                "size": [
                                    "big",
                                    "small"
                                ]
                            },
                            "name": "table"
                        }
                    },
                    "key": "size"
                }
            ]
        ]
        # print "errors: ", merged.get_doc()["errors"]
        # self.assertEqual(merged.get_doc()["errors"], expected_error)
    def test_can_merge_exception (self):
        """A document already containing an 'exception' value can itself be
        merged again; the exception value survives the second merge."""
        other_dict = {u"name": u"something",
                      u"description": u"these are other options",
                      u"options":{
                          u"color": [u"red", u"blue"],
                          u"size": [u"medium"],
                          u"tool": [u"cnc", u"laser", u"plaster"],
                          u"days": [u"tuesday", u"thursday"],
                          u"drinks": [u"beer", u"coffee"],
                          u"snacks": [u"crisps", u"cheese", u"apple"]
                      }
        }
        third_dict = {u"name": u"elephant",
                      u"description": u"another bunch of stuff",
                      u"options":{
                          u"color": [u"red", u"blue"],
                          u"size": [u"small"],
                          u"coffee": [u"latte", u"piccolo"]
                      }
        }
        # NOTE(review): 'expected' is never asserted against below.
        expected = {u"name": u"table",
                    u"description": u"This is a very nice table",
                    u"options":{
                        u"color": [u"blue", u"red"],
                        u"size": [u"big", u"small"],
                        u"tool": [u"cnc", u"laser"],
                        u"material": [u"metal", u"plastic", u"wood"],
                        u"days": [u"thursday", u"tuesday"],
                        u"drinks": [u"beer", u"coffee"],
                        u"snacks": [u"apple", u"cheese", u"crisps"]
                    }
        }
        other_version = WinnowVersion.add_doc(self.db, other_dict, {})
        # first merge creates the 'size' exception, second merge folds in
        # a third document on top of the exception-bearing result
        merged_version = WinnowVersion.merged(self.db, BASE_PRODUCT, {}, self.base_version, other_version)
        third_version = WinnowVersion.add_doc(self.db, third_dict, {})
        merged_again = WinnowVersion.merged(self.db, merged_version.get_doc(), {}, merged_version, third_version)
        size = merged_again.get_doc()["options"]["size"]
        self.assertTrue(isinstance(size, dict))
        self.assertEqual(size["type"],"exception")
        self.assertEqual(size["values"], [[u'big', u'small'], u'medium'])
    def test_default(self):
        """Exercise winnow.default_choices on a merged document.

        NOTE(review): this test currently makes no assertions on `default`
        and ends abruptly — possibly truncated; confirm against the full
        source and add assertions.
        """
        other_dict = {u"name": u"something",
                      u"description": u"these are other options",
                      u"options":{
                          u"color": [u"red", u"blue"],
                          u"size": [u"medium"],
                          u"tool": [u"cnc", u"laser", u"plaster"],
                          u"days": [u"tuesday", u"thursday"],
                          u"drinks": [u"beer", u"coffee"],
                          u"snacks": [u"crisps", u"cheese", u"apple"]
                      }
        }
        # NOTE(review): 'expected' is never asserted against below.
        expected = {u"name": u"table",
                    u"description": u"This is a very nice table",
                    u"options":{
                        u"color": [u"blue", u"red"],
                        u"size": [u"big", u"small"],
                        u"tool": [u"cnc", u"laser"],
                        u"material": [u"metal", u"plastic", u"wood"],
                        u"days": [u"thursday", u"tuesday"],
                        u"drinks": [u"beer", u"coffee"],
                        u"snacks": [u"apple", u"cheese", u"crisps"]
                    }
        }
        other_version = WinnowVersion.add_doc(self.db, other_dict, {})
        merged_version = WinnowVersion.merged(self.db, BASE_PRODUCT, {}, self.base_version, other_version)
        default = winnow.default_choices(merged_version, [])
import winnow
from winnow.models.base import WinnowVersion
import json
from db import MockKVStore
BASE_PRODUCT = {u"name": u"table",
u"description": u"This is a very nice table",
u"options":{
u"color": [u"red", u"green", u"blue"],
u"size": [u"big", u"small"],
u"tool": [u"cnc", u"laser"],
u"material": [u"wood", u"metal", u"plastic"]
}
}
class TestMergeCreatesExceptionValue(unittest.TestCase):
def setUp(self):
self.db = MockKVStore()
self.base_version = WinnowVersion.add_doc(self.db, BASE_PRODUCT, {})
def test_does_a_merge(self):
other_dict = {u"name": u"something",
u"description": u"these are other options",
u"options":{
u"color": [u"red", u"blue"],
u"size": [u"medium"],
u"tool": [u"cnc", u"laser", u"plaster"],
u"days": [u"tuesday", u"thursday"],
u"drinks": [u"beer", u"coffee"],
u"snacks": [u"crisps", u"cheese", u"apple"]
}
}
expected = {u"name": u"table",
u"description": u"This is a very nice table",
u"options":{
u"color": [u"blue", u"red"],
u"size": [u"big", u"small"],
u"tool": [u"cnc", u"laser"],
u"material": [u"metal", u"plastic", u"wood"],
u"days": [u"thursday", u"tuesday"],
u"drinks": [u"beer", u"coffee"],
u"snacks": [u"apple", u"cheese", u"crisps"]
}
}
other_version = WinnowVersion.add_doc(self.db, other_dict, {})
merged = WinnowVersion.merged(self.db, BASE_PRODUCT, {}, self.base_version, other_version)
size = merged.get_doc()["options"]["size"]
self.assertTrue(isinstance(size, dict))
self.assertEqual(size["type"],"exception")
self.assertEqual(size["values"], [[u'big', u'small'], u'medium'])
self.assertTrue("errors" in merged.get_doc())
print json.dumps(merged.get_doc()["errors"], indent=4)
expected_error = [
[
{
"values": [
[
"big",
"small"
],
"medium"
],
"type": "exception",
"context": {
"source_b": {
"description": "these are other options",
"options": {
"snacks": [
"crisps",
"cheese",
"apple"
],
"color": [
"red",
"blue"
],
"tool": [
"cnc",
"laser",
"plaster"
],
"days": [
"tuesday",
"thursday"
],
"drinks": [
"beer",
"coffee"
],
"size": [
"medium"
]
},
"name": "something"
},
"source_a": {
"description": "This is a very nice table",
"options": {
"color": [
"red",
"green",
"blue"
],
"tool": [
"cnc",
"laser"
],
"material": [
"wood",
"metal",
"plastic"
],
"size": [
"big",
"small"
]
},
"name": "table"
}
},
"key": "size"
}
]
]
# print "errors: ", merged.get_doc()["errors"]
# self.assertEqual(merged.get_doc()["errors"], expected_error)
def test_can_merge_exception (self):
other_dict = {u"name": u"something",
u"description": u"these are other options",
u"options":{
u"color": [u"red", u"blue"],
u"size": [u"medium"],
u"tool": [u"cnc", u"laser", u"plaster"],
u"days": [u"tuesday", u"thursday"],
u"drinks": [u"beer", u"coffee"],
u"snacks": [u"crisps", u"cheese", u"apple"]
}
}
third_dict = {u"name": u"elephant",
u"description": u"another bunch of stuff",
u"options":{
u"color": [u"red", u"blue"],
u"size": [u"small"],
u"coffee": [u"latte", u"piccolo"]
}
}
expected = {u"name": u"table",
u"description": u"This is a very nice table",
u"options":{
u"color": [u"blue", u"red"],
u"size": [u"big", u"small"],
u"tool": [u"cnc", u"laser"],
u"material": [u"metal", u"plastic", u"wood"],
u"days": [u"thursday", u"tuesday"],
u"drinks": [u"beer", u"coffee"],
u"snacks": [u"apple", u"cheese", u"crisps"]
}
}
other_version = WinnowVersion.add_doc(self.db, other_dict, {})
merged_version = WinnowVersion.merged(self.db, BASE_PRODUCT, {}, self.base_version, other_version)
third_version = WinnowVersion.add_doc(self.db, third_dict, {})
merged_again = WinnowVersion.merged(self.db, merged_version.get_doc(), {}, merged_version, third_version)
size = merged_again.get_doc()["options"]["size"]
self.assertTrue(isinstance(size, dict))
self.assertEqual(size["type"],"exception")
self.assertEqual(size["values"], [[u'big', u'small'], u'medium'])
def test_default(self):
other_dict = {u"name": u"something",
u"description": u"these are other options",
u"options":{
u"color": [u"red", u"blue"],
u"size": [u"medium"],
u"tool": [u"cnc", u"laser", u"plaster"],
u"days": [u"tuesday", u"thursday"],
u"drinks": [u"beer", u"coffee"],
u"snacks": [u"crisps", u"cheese", u"apple"]
}
}
expected = {u"name": u"table",
u"description": u"This is a very nice table",
u"options":{
u"color": [u"blue", u"red"],
u"size": [u"big", u"small"],
u"tool": [u"cnc", u"laser"],
u"material": [u"metal", u"plastic", u"wood"],
u"days": [u"thursday", u"tuesday"],
u"drinks": [u"beer", u"coffee"],
u"snacks": [u"apple", u"cheese", u"crisps"]
}
}
other_version = WinnowVersion.add_doc(self.db, other_dict, {})
merged_version = WinnowVersion.merged(self.db, BASE_PRODUCT, {}, self.base_version, other_version)
default = winnow.default_choices(merged_version, []) | 0.296858 | 0.415136 |
__author__ = '<EMAIL> (<NAME>)'
import csv
import re
import sys
import GeoIP
sys.path.append('..')
sys.path.append('/Users/tstromberg/namebench')
import third_party
from libnamebench import nameserver_list
from libnamebench import config
from libnamebench import addr_util
import check_nameserver_popularity
# GeoIP databases: city-level geolocation and ASN lookup, loaded into memory
gi = GeoIP.open('/usr/local/share/GeoLiteCity.dat', GeoIP.GEOIP_MEMORY_CACHE)
asn_lookup = GeoIP.open('/usr/local/share/GeoIPASNum.dat', GeoIP.GEOIP_MEMORY_CACHE)
# nameservers already known to the local namebench configuration
existing_nameservers = config.GetLocalNameServerList()
check_ns = []
# NOTE(review): the output file handle is never closed explicitly; buffered
# rows could be lost on abnormal exit
output = csv.writer(open('output.csv', 'w'))
# Read free-form text from stdin and queue every IPv4 address that is not
# already in the local nameserver list.
for line in sys.stdin:
  ips = addr_util.ExtractIPsFromString(line)
  for ip in ips:
    print ip
    # disable IPV6 until we can improve our regular expression matching
    if ':' in ip:
      continue
    if ip not in existing_nameservers:
      # (ip, ip): presumably (address, display-name) pairs — no name known yet
      check_ns.append((ip, ip))
if not check_ns:
  print "no new servers to check"
  sys.exit(1)
else:
  print "%s servers to check" % len(check_ns)
print '-' * 80
# Health-check all candidates with generous timeouts and high concurrency.
nameserver_list.MAX_INITIAL_HEALTH_THREAD_COUNT = 100
nameservers = nameserver_list.NameServers([],
    global_servers=check_ns,
    timeout=10,
    health_timeout=10,
    threads=100,
    num_servers=5000,
    skip_cache_collusion_checks=True,
)
# do not abort when only a small fraction of candidates are healthy
nameservers.min_healthy_percent = 0
sanity_checks = config.GetLocalSanityChecks()
try:
  nameservers.CheckHealth(sanity_checks['primary'], sanity_checks['secondary'])
except nameserver_list.TooFewNameservers:
  # tolerated: we still want to report whatever servers did respond
  pass
print '-' * 80
for ns in nameservers:
try:
details = gi.record_by_addr(ns.ip)
except:
pass
if not details:
details = {}
city = details.get('city', '')
if city:
city = city.decode('latin-1')
latitude = details.get('latitude', '')
longitude = details.get('longitude', '')
country = details.get('country_name', '')
if country:
country = country.decode('latin-1')
country_code = details.get('country_code', '')
region = details.get('region_name', '')
if region:
region = region.decode('latin-1')
try:
results = check_nameserver_popularity.CheckPopularity(ns.ip)
urls = [ x['Url'] for x in results ]
except:
urls = ['(exception)']
num_urls = len(urls)
main = "%s=UNKNOWN" % ns.ip
if 'Responded with: REFUSED' in ns.warnings:
note = '_REFUSED_'
elif 'a.root-servers.net.: Timeout' in ns.warnings:
note = '_TIMEOUT_'
elif 'No answer (NOERROR): a.root-servers.net.' in ns.warnings:
note = '_NOANSWER_'
elif ns.warnings:
note = '_WARNING/%s_' % '/'.join(list(ns.warnings))
else:
note = ''
if ns.hostname != ns.ip:
domain = addr_util.GetDomainPartOfHostname(ns.hostname)
if domain:
good_urls = [x for x in urls if re.search(domain, x, re.I)]
if good_urls:
urls = good_urls
geo = '/'.join([x for x in [country_code, region, city] if x and not x.isdigit()]).encode('utf-8')
coords = ','.join(map(str, [latitude,longitude]))
asn = asn_lookup.org_by_addr(ns.ip)
row = [ns.ip, 'regional', 'UNKNOWN', '', ns.hostname, geo, coords, asn, note, num_urls, ' '.join(urls[:2]), ns.version]
print row
output.writerow(row) | tools/check_dns_servers.py | __author__ = '<EMAIL> (<NAME>)'
#!/usr/bin/env python
"""Read candidate IPs from stdin, health-check unknown nameservers, and
write one enriched CSV row per server (geo, ASN, popularity, warnings).

Python 2 script (namebench tooling). Output goes to ./output.csv.
"""
import csv
import re
import sys

import GeoIP

sys.path.append('..')
sys.path.append('/Users/tstromberg/namebench')
import third_party
from libnamebench import nameserver_list
from libnamebench import config
from libnamebench import addr_util
import check_nameserver_popularity

gi = GeoIP.open('/usr/local/share/GeoLiteCity.dat', GeoIP.GEOIP_MEMORY_CACHE)
asn_lookup = GeoIP.open('/usr/local/share/GeoIPASNum.dat', GeoIP.GEOIP_MEMORY_CACHE)
existing_nameservers = config.GetLocalNameServerList()
check_ns = []
output = csv.writer(open('output.csv', 'w'))

# Collect IPs from stdin that are not already in the local nameserver list.
for line in sys.stdin:
    ips = addr_util.ExtractIPsFromString(line)
    for ip in ips:
        print(ip)
        # disable IPV6 until we can improve our regular expression matching
        if ':' in ip:
            continue
        if ip not in existing_nameservers:
            check_ns.append((ip, ip))

if not check_ns:
    print("no new servers to check")
    sys.exit(1)
else:
    print("%s servers to check" % len(check_ns))
print('-' * 80)

nameserver_list.MAX_INITIAL_HEALTH_THREAD_COUNT = 100
nameservers = nameserver_list.NameServers(
    [],
    global_servers=check_ns,
    timeout=10,
    health_timeout=10,
    threads=100,
    num_servers=5000,
    skip_cache_collusion_checks=True,
)
nameservers.min_healthy_percent = 0
sanity_checks = config.GetLocalSanityChecks()
try:
    nameservers.CheckHealth(sanity_checks['primary'], sanity_checks['secondary'])
except nameserver_list.TooFewNameservers:
    # Expected when most candidates fail health checks; report them anyway.
    pass
print('-' * 80)

for ns in nameservers:
    # BUG FIX: reset per iteration. Previously a failed lookup left `details`
    # unbound on the first pass (NameError) or holding the *previous* server's
    # geo data on later passes.
    details = None
    try:
        details = gi.record_by_addr(ns.ip)
    except Exception:
        pass
    if not details:
        details = {}
    city = details.get('city', '')
    if city:
        city = city.decode('latin-1')
    latitude = details.get('latitude', '')
    longitude = details.get('longitude', '')
    country = details.get('country_name', '')
    if country:
        country = country.decode('latin-1')
    country_code = details.get('country_code', '')
    region = details.get('region_name', '')
    if region:
        region = region.decode('latin-1')
    try:
        results = check_nameserver_popularity.CheckPopularity(ns.ip)
        urls = [x['Url'] for x in results]
    except Exception:
        # Best-effort: record that the popularity lookup blew up.
        urls = ['(exception)']
    num_urls = len(urls)
    if 'Responded with: REFUSED' in ns.warnings:
        note = '_REFUSED_'
    elif 'a.root-servers.net.: Timeout' in ns.warnings:
        note = '_TIMEOUT_'
    elif 'No answer (NOERROR): a.root-servers.net.' in ns.warnings:
        note = '_NOANSWER_'
    elif ns.warnings:
        note = '_WARNING/%s_' % '/'.join(list(ns.warnings))
    else:
        note = ''
    # Prefer URLs that mention the server's own domain, when it has one.
    if ns.hostname != ns.ip:
        domain = addr_util.GetDomainPartOfHostname(ns.hostname)
        if domain:
            good_urls = [x for x in urls if re.search(domain, x, re.I)]
            if good_urls:
                urls = good_urls
    geo = '/'.join([x for x in [country_code, region, city] if x and not x.isdigit()]).encode('utf-8')
    coords = ','.join(map(str, [latitude, longitude]))
    asn = asn_lookup.org_by_addr(ns.ip)
    row = [ns.ip, 'regional', 'UNKNOWN', '', ns.hostname, geo, coords, asn, note, num_urls, ' '.join(urls[:2]), ns.version]
    print(row)
output.writerow(row) | 0.080891 | 0.085978 |
import platform
import time
from uuid import uuid4
from django.conf import settings
from django.db import models
from django.db.models import F
from django.db.models import QuerySet
from morango.models import UUIDField
from morango.models.core import SyncSession
from .utils import LANDING_PAGE_LEARN
from .utils import LANDING_PAGE_SIGN_IN
from kolibri.core.auth.constants import role_kinds
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.base import RoleBasedPermissions
from kolibri.core.auth.permissions.general import IsOwn
from kolibri.core.utils.cache import process_cache as cache
from kolibri.deployment.default.sqlite_db_names import SYNC_QUEUE
from kolibri.plugins.app.utils import interface
device_permissions_fields = ["is_superuser", "can_manage_content"]
class DevicePermissions(models.Model):
    """
    This class stores metadata about device permissions for FacilityUsers.
    """

    # One row per user; the FacilityUser's id doubles as this row's primary key.
    user = models.OneToOneField(
        FacilityUser,
        on_delete=models.CASCADE,
        related_name="devicepermissions",
        blank=False,
        null=False,
        primary_key=True,
    )
    # Full administrative rights on this device.
    is_superuser = models.BooleanField(default=False)
    # Content-management permission (enforcement lives in the permission checks elsewhere).
    can_manage_content = models.BooleanField(default=False)
DEVICE_SETTINGS_CACHE_KEY = "device_settings_cache_key"
class DeviceSettingsQuerySet(QuerySet):
    def delete(self, **kwargs):
        """Delete matching rows, invalidating the cached DeviceSettings first."""
        # Drop the cache before deleting so readers re-fetch from the database.
        cache.delete(DEVICE_SETTINGS_CACHE_KEY)
        return super(DeviceSettingsQuerySet, self).delete(**kwargs)
class DeviceSettingsManager(models.Manager.from_queryset(DeviceSettingsQuerySet)):
    def get(self, **kwargs):
        """Return the DeviceSettings row, served from the process cache when warm.

        NOTE(review): while the 600s cache entry is warm, ``kwargs`` are
        ignored -- safe only because DeviceSettings is a singleton
        (its save() forces pk=1); confirm no caller filters on other fields.
        """
        if DEVICE_SETTINGS_CACHE_KEY not in cache:
            model = super(DeviceSettingsManager, self).get(**kwargs)
            cache.set(DEVICE_SETTINGS_CACHE_KEY, model, 600)
        else:
            model = cache.get(DEVICE_SETTINGS_CACHE_KEY)
        return model
def get_device_hostname():
    """Return this machine's network name, truncated to 50 characters.

    Used as the default for DeviceSettings.name, whose CharField is declared
    with max_length=50 -- the returned value must never exceed that length.
    """
    return platform.node()[:50]
def app_is_enabled():
    """Return whether the app plugin interface is enabled.

    Used as the default for DeviceSettings.allow_other_browsers_to_connect.
    """
    return interface.enabled
class DeviceSettings(models.Model):
    """
    This class stores data about settings particular to this device.

    Persisted as a singleton row: save() always writes pk=1.
    """

    LANDING_PAGE_CHOICES = [
        (LANDING_PAGE_SIGN_IN, "Sign-in page"),
        (LANDING_PAGE_LEARN, "Learn page"),
    ]

    # Manager that serves get() through the process cache (see DeviceSettingsManager).
    objects = DeviceSettingsManager()

    # Has this device gone through initial setup yet?
    is_provisioned = models.BooleanField(default=False)
    # What is the default language that Kolibri is displayed in for this device?
    language_id = models.CharField(
        max_length=15, default=settings.LANGUAGE_CODE, blank=True, null=True
    )
    # What is the default facility for this device?
    default_facility = models.ForeignKey(
        Facility, on_delete=models.SET_NULL, blank=True, null=True
    )
    # Where should we redirect to on first page load?
    landing_page = models.CharField(
        max_length=7, choices=LANDING_PAGE_CHOICES, default=LANDING_PAGE_SIGN_IN
    )
    # Should users be able to browse content on this device without logging in?
    allow_guest_access = models.BooleanField(default=True)
    # Should peer devices be able to import non-public channels from this device?
    allow_peer_unlisted_channel_import = models.BooleanField(default=False)
    # Should learners be able to access resources that are not assigned to them on this device?
    allow_learner_unassigned_resource_access = models.BooleanField(default=True)
    # What's the name of this device?
    name = models.CharField(max_length=50, default=get_device_hostname)
    # Should this device allow browser sessions from non-localhost devices?
    allow_other_browsers_to_connect = models.BooleanField(default=app_is_enabled)
    # Is this a device that only synchronizes data about a subset of users?
    subset_of_users_device = models.BooleanField(default=False)

    def save(self, *args, **kwargs):
        """Validate, persist as the singleton row (pk forced to 1), refresh cache."""
        self.pk = 1
        # Run full field/model validation on every save.
        self.full_clean()
        out = super(DeviceSettings, self).save(*args, **kwargs)
        cache.set(DEVICE_SETTINGS_CACHE_KEY, self, 600)
        return out

    def delete(self, *args, **kwargs):
        """Delete the settings row and invalidate the cached copy."""
        out = super(DeviceSettings, self).delete(*args, **kwargs)
        cache.delete(DEVICE_SETTINGS_CACHE_KEY)
        return out
CONTENT_CACHE_KEY_CACHE_KEY = "content_cache_key"
class ContentCacheKey(models.Model):
    """
    This class stores a cache key for content models that should be updated
    whenever the content metadata stored on the device changes.

    Persisted as a singleton row (save() forces pk=1).
    """

    # Unix timestamp used as the cache-busting key.
    key = models.IntegerField(default=time.time)

    def save(self, *args, **kwargs):
        # Singleton: always write to pk 1.
        self.pk = 1
        super(ContentCacheKey, self).save(*args, **kwargs)

    @classmethod
    def update_cache_key(cls):
        """Bump the key to the current time, persist it, and cache it for 5000s."""
        cache_key, created = cls.objects.get_or_create()
        cache_key.key = time.time()
        cache_key.save()
        cache.set(CONTENT_CACHE_KEY_CACHE_KEY, cache_key.key, 5000)
        return cache_key

    @classmethod
    def get_cache_key(cls):
        """Return the current key, preferring the process cache over the DB."""
        key = cache.get(CONTENT_CACHE_KEY_CACHE_KEY)
        if key is None:
            try:
                cache_key = cls.objects.get()
            except cls.DoesNotExist:
                # No row yet -- create one with a fresh timestamp.
                cache_key = cls.update_cache_key()
            key = cache_key.key
            cache.set(CONTENT_CACHE_KEY_CACHE_KEY, key, 5000)
        return key
APP_KEY_CACHE_KEY = "app_key"
class DeviceAppKey(models.Model):
    """
    This class stores a key that is checked to make sure that a webview
    is making requests from a privileged device (i.e. from inside an
    app-wrapper webview).

    Persisted as a singleton row (save() forces pk=1).
    """

    # NOTE(review): the default is a uuid.UUID while update_app_key() stores
    # uuid4().hex (a str) -- presumably morango's UUIDField normalizes both;
    # confirm.
    key = UUIDField(default=uuid4)

    def save(self, *args, **kwargs):
        # Singleton: always write to pk 1.
        self.pk = 1
        super(DeviceAppKey, self).save(*args, **kwargs)

    @classmethod
    def update_app_key(cls):
        """Rotate the app key, persist it, and cache the new value for 5000s."""
        app_key, created = cls.objects.get_or_create()
        app_key.key = uuid4().hex
        app_key.save()
        cache.set(APP_KEY_CACHE_KEY, app_key.key, 5000)
        return app_key

    @classmethod
    def get_app_key(cls):
        """Return the current app key, preferring the process cache over the DB."""
        key = cache.get(APP_KEY_CACHE_KEY)
        if key is None:
            try:
                app_key = cls.objects.get()
            except cls.DoesNotExist:
                # No key yet -- generate and persist one.
                app_key = cls.update_app_key()
            key = app_key.key
            cache.set(APP_KEY_CACHE_KEY, key, 5000)
        return key
class SQLiteLock(models.Model):
    """Single-row table; presumably used as a DB-level lock marker for SQLite
    deployments -- the locking semantics live in the callers (confirm)."""

    id = models.AutoField(primary_key=True)

    def save(self, *args, **kwargs):
        # Singleton: always write to pk 1.
        self.pk = 1
        super(SQLiteLock, self).save(*args, **kwargs)
class SyncQueue(models.Model):
    """
    This class maintains the queue of the devices that try to sync
    with this server.
    """

    id = UUIDField(primary_key=True, default=uuid4)
    user_id = UUIDField(blank=False, null=False)
    instance_id = UUIDField(blank=False, null=False)
    # Row creation time; `updated` is the last keep-alive touch (Unix seconds).
    datetime = models.DateTimeField(auto_now_add=True)
    updated = models.FloatField(default=time.time)
    # polling interval is 5 seconds by default
    keep_alive = models.FloatField(default=5.0)

    @classmethod
    def clean_stale(cls):
        """
        This method will delete all the devices from the queue
        with the expire time (in seconds) exhausted.
        """
        # A row is stale after missing two keep-alive windows:
        # updated <= now - 2 * keep_alive (evaluated in SQL via F()).
        cls.objects.filter(updated__lte=time.time() - F("keep_alive") * 2).delete()
class SyncQueueRouter(object):
    """
    Django database router that pins the SyncQueue model to the SYNC_QUEUE
    database; it expresses no opinion (returns None) for every other model.
    """

    def db_for_read(self, model, **hints):
        """Send all read operations on the SyncQueue model to SYNC_QUEUE."""
        if model is SyncQueue:
            return SYNC_QUEUE
        return None

    def db_for_write(self, model, **hints):
        """Send all write operations on the SyncQueue model to SYNC_QUEUE."""
        if model is SyncQueue:
            return SYNC_QUEUE
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """Determine if relationship is allowed between two objects."""
        # Allow any relation between SyncQueue and SyncQueue.
        if obj1._meta.model is SyncQueue and obj2._meta.model is SyncQueue:
            return True
        # No opinion if neither object is a SyncQueue.
        elif SyncQueue not in [obj1._meta.model, obj2._meta.model]:
            return None
        # Block relationship if one object is a SyncQueue model and the other isn't.
        return False

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """Ensure that the SyncQueue models get created on the right database."""
        if (
            app_label == SyncQueue._meta.app_label
            and model_name == SyncQueue._meta.model_name
        ):
            # The SyncQueue model should be migrated only on the SYNC_QUEUE database.
            return db == SYNC_QUEUE
        elif db == SYNC_QUEUE:
            # Ensure that all other apps don't get migrated on the SYNC_QUEUE database.
            return False
        # No opinion for all other scenarios
        return None
class UserSyncStatus(models.Model):
    """Tracks, per user, the latest sync session and queue state."""

    user = models.ForeignKey(FacilityUser, on_delete=models.CASCADE, null=False)
    # Most recent sync session for this user, if any (nulled when the session is deleted).
    sync_session = models.ForeignKey(
        SyncSession, on_delete=models.SET_NULL, null=True, blank=True
    )
    # Presumably set while the user's device waits in the SyncQueue -- confirm with writers.
    queued = models.BooleanField(default=False)

    # users can read their own SyncStatus
    own = IsOwn(read_only=True)

    # SyncStatus can be read by admins, and coaches, for the member user
    role = RoleBasedPermissions(
        target_field="user",
        can_be_created_by=(),
        can_be_read_by=(role_kinds.ADMIN, role_kinds.COACH),
        can_be_updated_by=(),
        can_be_deleted_by=(),
        collection_field="user__memberships__collection",
        is_syncable=False,
    )
permissions = own | role | kolibri/core/device/models.py | import platform
import time
from uuid import uuid4
from django.conf import settings
from django.db import models
from django.db.models import F
from django.db.models import QuerySet
from morango.models import UUIDField
from morango.models.core import SyncSession
from .utils import LANDING_PAGE_LEARN
from .utils import LANDING_PAGE_SIGN_IN
from kolibri.core.auth.constants import role_kinds
from kolibri.core.auth.models import Facility
from kolibri.core.auth.models import FacilityUser
from kolibri.core.auth.permissions.base import RoleBasedPermissions
from kolibri.core.auth.permissions.general import IsOwn
from kolibri.core.utils.cache import process_cache as cache
from kolibri.deployment.default.sqlite_db_names import SYNC_QUEUE
from kolibri.plugins.app.utils import interface
device_permissions_fields = ["is_superuser", "can_manage_content"]
class DevicePermissions(models.Model):
    """
    This class stores metadata about device permissions for FacilityUsers.
    """

    # One row per user; the FacilityUser's id doubles as this row's primary key.
    user = models.OneToOneField(
        FacilityUser,
        on_delete=models.CASCADE,
        related_name="devicepermissions",
        blank=False,
        null=False,
        primary_key=True,
    )
    # Full administrative rights on this device.
    is_superuser = models.BooleanField(default=False)
    # Content-management permission (enforcement lives in the permission checks elsewhere).
    can_manage_content = models.BooleanField(default=False)
DEVICE_SETTINGS_CACHE_KEY = "device_settings_cache_key"
class DeviceSettingsQuerySet(QuerySet):
    def delete(self, **kwargs):
        """Delete matching rows, invalidating the cached DeviceSettings first."""
        # Drop the cache before deleting so readers re-fetch from the database.
        cache.delete(DEVICE_SETTINGS_CACHE_KEY)
        return super(DeviceSettingsQuerySet, self).delete(**kwargs)
class DeviceSettingsManager(models.Manager.from_queryset(DeviceSettingsQuerySet)):
    def get(self, **kwargs):
        """Return the DeviceSettings row, served from the process cache when warm.

        NOTE(review): while the 600s cache entry is warm, ``kwargs`` are
        ignored -- safe only because DeviceSettings is a singleton
        (its save() forces pk=1); confirm no caller filters on other fields.
        """
        if DEVICE_SETTINGS_CACHE_KEY not in cache:
            model = super(DeviceSettingsManager, self).get(**kwargs)
            cache.set(DEVICE_SETTINGS_CACHE_KEY, model, 600)
        else:
            model = cache.get(DEVICE_SETTINGS_CACHE_KEY)
        return model
def get_device_hostname():
    """Return this machine's network name, truncated to 50 characters.

    Used as the default for DeviceSettings.name, whose CharField is declared
    with max_length=50 -- the returned value must never exceed that length.
    """
    return platform.node()[:50]
def app_is_enabled():
    """Return whether the app plugin interface is enabled.

    Used as the default for DeviceSettings.allow_other_browsers_to_connect.
    """
    return interface.enabled
class DeviceSettings(models.Model):
    """
    This class stores data about settings particular to this device.

    Persisted as a singleton row: save() always writes pk=1.
    """

    LANDING_PAGE_CHOICES = [
        (LANDING_PAGE_SIGN_IN, "Sign-in page"),
        (LANDING_PAGE_LEARN, "Learn page"),
    ]

    # Manager that serves get() through the process cache (see DeviceSettingsManager).
    objects = DeviceSettingsManager()

    # Has this device gone through initial setup yet?
    is_provisioned = models.BooleanField(default=False)
    # What is the default language that Kolibri is displayed in for this device?
    language_id = models.CharField(
        max_length=15, default=settings.LANGUAGE_CODE, blank=True, null=True
    )
    # What is the default facility for this device?
    default_facility = models.ForeignKey(
        Facility, on_delete=models.SET_NULL, blank=True, null=True
    )
    # Where should we redirect to on first page load?
    landing_page = models.CharField(
        max_length=7, choices=LANDING_PAGE_CHOICES, default=LANDING_PAGE_SIGN_IN
    )
    # Should users be able to browse content on this device without logging in?
    allow_guest_access = models.BooleanField(default=True)
    # Should peer devices be able to import non-public channels from this device?
    allow_peer_unlisted_channel_import = models.BooleanField(default=False)
    # Should learners be able to access resources that are not assigned to them on this device?
    allow_learner_unassigned_resource_access = models.BooleanField(default=True)
    # What's the name of this device?
    name = models.CharField(max_length=50, default=get_device_hostname)
    # Should this device allow browser sessions from non-localhost devices?
    allow_other_browsers_to_connect = models.BooleanField(default=app_is_enabled)
    # Is this a device that only synchronizes data about a subset of users?
    subset_of_users_device = models.BooleanField(default=False)

    def save(self, *args, **kwargs):
        """Validate, persist as the singleton row (pk forced to 1), refresh cache."""
        self.pk = 1
        # Run full field/model validation on every save.
        self.full_clean()
        out = super(DeviceSettings, self).save(*args, **kwargs)
        cache.set(DEVICE_SETTINGS_CACHE_KEY, self, 600)
        return out

    def delete(self, *args, **kwargs):
        """Delete the settings row and invalidate the cached copy."""
        out = super(DeviceSettings, self).delete(*args, **kwargs)
        cache.delete(DEVICE_SETTINGS_CACHE_KEY)
        return out
CONTENT_CACHE_KEY_CACHE_KEY = "content_cache_key"
class ContentCacheKey(models.Model):
    """
    This class stores a cache key for content models that should be updated
    whenever the content metadata stored on the device changes.

    Persisted as a singleton row (save() forces pk=1).
    """

    # Unix timestamp used as the cache-busting key.
    key = models.IntegerField(default=time.time)

    def save(self, *args, **kwargs):
        # Singleton: always write to pk 1.
        self.pk = 1
        super(ContentCacheKey, self).save(*args, **kwargs)

    @classmethod
    def update_cache_key(cls):
        """Bump the key to the current time, persist it, and cache it for 5000s."""
        cache_key, created = cls.objects.get_or_create()
        cache_key.key = time.time()
        cache_key.save()
        cache.set(CONTENT_CACHE_KEY_CACHE_KEY, cache_key.key, 5000)
        return cache_key

    @classmethod
    def get_cache_key(cls):
        """Return the current key, preferring the process cache over the DB."""
        key = cache.get(CONTENT_CACHE_KEY_CACHE_KEY)
        if key is None:
            try:
                cache_key = cls.objects.get()
            except cls.DoesNotExist:
                # No row yet -- create one with a fresh timestamp.
                cache_key = cls.update_cache_key()
            key = cache_key.key
            cache.set(CONTENT_CACHE_KEY_CACHE_KEY, key, 5000)
        return key
APP_KEY_CACHE_KEY = "app_key"
class DeviceAppKey(models.Model):
    """
    This class stores a key that is checked to make sure that a webview
    is making requests from a privileged device (i.e. from inside an
    app-wrapper webview).

    Persisted as a singleton row (save() forces pk=1).
    """

    # NOTE(review): the default is a uuid.UUID while update_app_key() stores
    # uuid4().hex (a str) -- presumably morango's UUIDField normalizes both;
    # confirm.
    key = UUIDField(default=uuid4)

    def save(self, *args, **kwargs):
        # Singleton: always write to pk 1.
        self.pk = 1
        super(DeviceAppKey, self).save(*args, **kwargs)

    @classmethod
    def update_app_key(cls):
        """Rotate the app key, persist it, and cache the new value for 5000s."""
        app_key, created = cls.objects.get_or_create()
        app_key.key = uuid4().hex
        app_key.save()
        cache.set(APP_KEY_CACHE_KEY, app_key.key, 5000)
        return app_key

    @classmethod
    def get_app_key(cls):
        """Return the current app key, preferring the process cache over the DB."""
        key = cache.get(APP_KEY_CACHE_KEY)
        if key is None:
            try:
                app_key = cls.objects.get()
            except cls.DoesNotExist:
                # No key yet -- generate and persist one.
                app_key = cls.update_app_key()
            key = app_key.key
            cache.set(APP_KEY_CACHE_KEY, key, 5000)
        return key
class SQLiteLock(models.Model):
    """Single-row table; presumably used as a DB-level lock marker for SQLite
    deployments -- the locking semantics live in the callers (confirm)."""

    id = models.AutoField(primary_key=True)

    def save(self, *args, **kwargs):
        # Singleton: always write to pk 1.
        self.pk = 1
        super(SQLiteLock, self).save(*args, **kwargs)
class SyncQueue(models.Model):
    """
    This class maintains the queue of the devices that try to sync
    with this server.
    """

    id = UUIDField(primary_key=True, default=uuid4)
    user_id = UUIDField(blank=False, null=False)
    instance_id = UUIDField(blank=False, null=False)
    # Row creation time; `updated` is the last keep-alive touch (Unix seconds).
    datetime = models.DateTimeField(auto_now_add=True)
    updated = models.FloatField(default=time.time)
    # polling interval is 5 seconds by default
    keep_alive = models.FloatField(default=5.0)

    @classmethod
    def clean_stale(cls):
        """
        This method will delete all the devices from the queue
        with the expire time (in seconds) exhausted.
        """
        # A row is stale after missing two keep-alive windows:
        # updated <= now - 2 * keep_alive (evaluated in SQL via F()).
        cls.objects.filter(updated__lte=time.time() - F("keep_alive") * 2).delete()
class SyncQueueRouter(object):
    """
    Django database router that pins the SyncQueue model to the SYNC_QUEUE
    database; it expresses no opinion (returns None) for every other model.
    """

    def db_for_read(self, model, **hints):
        """Send all read operations on the SyncQueue model to SYNC_QUEUE."""
        if model is SyncQueue:
            return SYNC_QUEUE
        return None

    def db_for_write(self, model, **hints):
        """Send all write operations on the SyncQueue model to SYNC_QUEUE."""
        if model is SyncQueue:
            return SYNC_QUEUE
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """Determine if relationship is allowed between two objects."""
        # Allow any relation between SyncQueue and SyncQueue.
        if obj1._meta.model is SyncQueue and obj2._meta.model is SyncQueue:
            return True
        # No opinion if neither object is a SyncQueue.
        elif SyncQueue not in [obj1._meta.model, obj2._meta.model]:
            return None
        # Block relationship if one object is a SyncQueue model and the other isn't.
        return False

    def allow_migrate(self, db, app_label, model_name=None, **hints):
        """Ensure that the SyncQueue models get created on the right database."""
        if (
            app_label == SyncQueue._meta.app_label
            and model_name == SyncQueue._meta.model_name
        ):
            # The SyncQueue model should be migrated only on the SYNC_QUEUE database.
            return db == SYNC_QUEUE
        elif db == SYNC_QUEUE:
            # Ensure that all other apps don't get migrated on the SYNC_QUEUE database.
            return False
        # No opinion for all other scenarios
        return None
class UserSyncStatus(models.Model):
    """Tracks, per user, the latest sync session and queue state."""

    user = models.ForeignKey(FacilityUser, on_delete=models.CASCADE, null=False)
    # Most recent sync session for this user, if any (nulled when the session is deleted).
    sync_session = models.ForeignKey(
        SyncSession, on_delete=models.SET_NULL, null=True, blank=True
    )
    # Presumably set while the user's device waits in the SyncQueue -- confirm with writers.
    queued = models.BooleanField(default=False)

    # users can read their own SyncStatus
    own = IsOwn(read_only=True)

    # SyncStatus can be read by admins, and coaches, for the member user
    role = RoleBasedPermissions(
        target_field="user",
        can_be_created_by=(),
        can_be_read_by=(role_kinds.ADMIN, role_kinds.COACH),
        can_be_updated_by=(),
        can_be_deleted_by=(),
        collection_field="user__memberships__collection",
        is_syncable=False,
    )
permissions = own | role | 0.506836 | 0.082365 |
import curses
from npyscreen import (
FormMultiPage,
fmActionFormV2,
ActionPopup,
NPSAppManaged,
MiniButtonPress,
notify,
)
class CustomNPSAppManaged(NPSAppManaged):
    """NPSAppManaged that can prune the current form from its visit history."""

    def removeCurrentFromHistory(self):
        # Discard the most recently visited form so "back" navigation skips it.
        del self._FORM_VISIT_LIST[-1]
class CustomFormMultiPage(FormMultiPage):
    """FormMultiPage with a footer line and an optional OK ("Back") button.

    When ``self.action`` is True the form behaves like an action form: an OK
    button is created on the last page before the edit loop and torn down
    again afterwards; pressing it ends editing and captures on_ok()'s result.
    """

    OK_BUTTON_TEXT = "Back"

    def __init__(self, *args, **keywords):
        super(CustomFormMultiPage, self).__init__(*args, **keywords)
        # Value returned from the edit loop (set via on_ok in _during_edit_loop).
        self.edit_return_value = None
        # When False, all the OK-button machinery below is skipped.
        self.action = False

    def display_footer_at(self):
        # Bottom line of the form, one column in from the left edge.
        return self.lines - 1, 1

    def draw_form(self, *args, **keywords):
        super(CustomFormMultiPage, self).draw_form()
        footer = self.footer
        if isinstance(footer, bytes):
            footer = footer.decode("utf-8", "replace")
        y, x = self.display_footer_at()
        self.add_line(
            y,
            x,
            footer,
            self.make_attributes_list(footer, curses.A_NORMAL),
            self.columns - x - 1,
        )

    def on_ok(self):
        # Hook for subclasses; its return value becomes edit_return_value.
        pass

    def pre_edit_loop(self):
        if self.action:
            # Create the OK button on the last page, then return to page 0.
            self._page_for_buttons = len(self._pages__) - 1
            self.switch_page(self._page_for_buttons)
            tmp_rely, tmp_relx = self.nextrely, self.nextrelx
            my, mx = self.curses_pad.getmaxyx()
            ok_button_text = self.OK_BUTTON_TEXT
            my -= self.__class__.OK_BUTTON_BR_OFFSET[0]
            mx -= len(ok_button_text) + self.__class__.OK_BUTTON_BR_OFFSET[1]
            self.ok_button = self.add_widget(
                self.__class__.OKBUTTON_TYPE,
                name=ok_button_text,
                rely=my,
                relx=mx,
                use_max_space=True,
            )
            # Remembered so post_edit_loop can remove exactly this widget.
            self._ok_button_postion = len(self._widgets__) - 1
            self.nextrely, self.nextrelx = tmp_rely, tmp_relx
            self.switch_page(0)

    def _during_edit_loop(self):
        if self.action:
            if self.ok_button.value:
                # OK pressed: stop editing and capture on_ok()'s result.
                self.editing = False
                self.ok_button.value = False
                self.edit_return_value = self.on_ok()

    def resize(self):
        # NOTE(review): source indentation was lost; guarding the super() call
        # on self.action matches the original line order -- confirm upstream.
        if self.action:
            super(CustomFormMultiPage, self).resize()
            self.move_ok_button()

    def move_ok_button(self):
        # Re-anchor the OK button to the bottom-right corner after a resize.
        if self.action:
            if hasattr(self, "ok_button"):
                my, mx = self.curses_pad.getmaxyx()
                my -= self.__class__.OK_BUTTON_BR_OFFSET[0]
                mx -= (
                    len(self.__class__.OK_BUTTON_TEXT)
                    + self.__class__.OK_BUTTON_BR_OFFSET[1]
                )
                self.ok_button.relx = mx
                self.ok_button.rely = my

    def post_edit_loop(self):
        if self.action:
            # Tear down the OK button created in pre_edit_loop.
            self.switch_page(self._page_for_buttons)
            self.ok_button.destroy()
            del self._widgets__[self._ok_button_postion]
            del self.ok_button
            self.display()
        self.editing = False
        return self.edit_return_value
class CustomEditMenuPopup(fmActionFormV2.ActionFormV2):
    """Small fixed-position action popup whose only control button reads "Back"."""

    DEFAULT_LINES = 12
    DEFAULT_COLUMNS = 50
    SHOW_ATX = 10
    SHOW_ATY = 2
    OK_BUTTON_TEXT = "Back"

    def create_control_buttons(self):
        # Only an OK button (no cancel), anchored at the bottom-right corner
        # using the offsets defined on ActionFormV2.
        self._add_button(
            "ok_button",
            self.__class__.OKBUTTON_TYPE,
            self.__class__.OK_BUTTON_TEXT,
            0 - self.__class__.OK_BUTTON_BR_OFFSET[0],
            0
            - self.__class__.OK_BUTTON_BR_OFFSET[1]
            - len(self.__class__.OK_BUTTON_TEXT),
            None,
        )
class CustomAddDictEntryPopup(CustomEditMenuPopup):
    """Same geometry as CustomEditMenuPopup; the button reads "Add" instead."""

    OK_BUTTON_TEXT = "Add"
class CustomLoadPopup(ActionPopup):
    """ActionPopup with "Load" / "Back" buttons."""

    OK_BUTTON_TEXT = "Load"
    CANCEL_BUTTON_TEXT = "Back"
class CustomSavePopup(ActionPopup):
    """ActionPopup with "Save" / "Back" buttons."""

    OK_BUTTON_TEXT = "Save"
    CANCEL_BUTTON_TEXT = "Back"
class CustomCollectionButton(MiniButtonPress):
    """Button drawn at its label's exact width, flanked by "> <" when selected."""

    def __init__(self, screen, *args, **keywords):
        super(CustomCollectionButton, self).__init__(screen, *args, **keywords)
        self.color = "DEFAULT"
        # Fixed width: exactly wide enough for the label text.
        self.label_width = len(self.name)

    def calculate_area_needed(self):
        # One row high, label-width columns wide.
        return 1, self.label_width

    def update(self, clear=True):
        if clear:
            self.clear()
        if self.hidden:
            self.clear()
            return False
        # Draw "> label <" markers when the button's value is set (selected),
        # with theme colors when available.
        if self.value and self.do_colors():
            self.parent.curses_pad.addstr(
                self.rely, self.relx, ">", self.parent.theme_manager.findPair(self)
            )
            self.parent.curses_pad.addstr(
                self.rely,
                self.relx + self.width - 1,
                "<",
                self.parent.theme_manager.findPair(self),
            )
        elif self.value:
            self.parent.curses_pad.addstr(self.rely, self.relx, ">")
            self.parent.curses_pad.addstr(self.rely, self.relx + self.width - 1, "<")
        # Highlight the button while it has edit focus.
        if self.editing:
            button_state = curses.A_STANDOUT
        else:
            button_state = curses.A_NORMAL
        button_name = self.name
        if isinstance(button_name, bytes):
            button_name = button_name.decode(self.encoding, "replace")
        button_name = button_name.center(self.label_width)
        if self.do_colors():
            if self.cursor_color:
                if self.editing:
                    button_attributes = self.parent.theme_manager.findPair(
                        self, self.cursor_color
                    )
                else:
                    button_attributes = self.parent.theme_manager.findPair(
                        self, self.color
                    )
            else:
                button_attributes = (
                    self.parent.theme_manager.findPair(self, self.color) | button_state
                )
        else:
            button_attributes = button_state
        self.add_line(
            self.rely,
            self.relx,
            button_name,
            self.make_attributes_list(button_name, button_attributes),
            self.label_width,
        )
def custom_notify_wait(*args, **keywords):
notify(*args, **keywords)
curses.napms(1500)
curses.flushinp() | configsuite_tui/custom_widgets.py | import curses
from npyscreen import (
FormMultiPage,
fmActionFormV2,
ActionPopup,
NPSAppManaged,
MiniButtonPress,
notify,
)
class CustomNPSAppManaged(NPSAppManaged):
    """NPSAppManaged that can prune the current form from its visit history."""

    def removeCurrentFromHistory(self):
        # Discard the most recently visited form so "back" navigation skips it.
        del self._FORM_VISIT_LIST[-1]
class CustomFormMultiPage(FormMultiPage):
    """FormMultiPage with a footer line and an optional OK ("Back") button.

    When ``self.action`` is True the form behaves like an action form: an OK
    button is created on the last page before the edit loop and torn down
    again afterwards; pressing it ends editing and captures on_ok()'s result.
    """

    OK_BUTTON_TEXT = "Back"

    def __init__(self, *args, **keywords):
        super(CustomFormMultiPage, self).__init__(*args, **keywords)
        # Value returned from the edit loop (set via on_ok in _during_edit_loop).
        self.edit_return_value = None
        # When False, all the OK-button machinery below is skipped.
        self.action = False

    def display_footer_at(self):
        # Bottom line of the form, one column in from the left edge.
        return self.lines - 1, 1

    def draw_form(self, *args, **keywords):
        super(CustomFormMultiPage, self).draw_form()
        footer = self.footer
        if isinstance(footer, bytes):
            footer = footer.decode("utf-8", "replace")
        y, x = self.display_footer_at()
        self.add_line(
            y,
            x,
            footer,
            self.make_attributes_list(footer, curses.A_NORMAL),
            self.columns - x - 1,
        )

    def on_ok(self):
        # Hook for subclasses; its return value becomes edit_return_value.
        pass

    def pre_edit_loop(self):
        if self.action:
            # Create the OK button on the last page, then return to page 0.
            self._page_for_buttons = len(self._pages__) - 1
            self.switch_page(self._page_for_buttons)
            tmp_rely, tmp_relx = self.nextrely, self.nextrelx
            my, mx = self.curses_pad.getmaxyx()
            ok_button_text = self.OK_BUTTON_TEXT
            my -= self.__class__.OK_BUTTON_BR_OFFSET[0]
            mx -= len(ok_button_text) + self.__class__.OK_BUTTON_BR_OFFSET[1]
            self.ok_button = self.add_widget(
                self.__class__.OKBUTTON_TYPE,
                name=ok_button_text,
                rely=my,
                relx=mx,
                use_max_space=True,
            )
            # Remembered so post_edit_loop can remove exactly this widget.
            self._ok_button_postion = len(self._widgets__) - 1
            self.nextrely, self.nextrelx = tmp_rely, tmp_relx
            self.switch_page(0)

    def _during_edit_loop(self):
        if self.action:
            if self.ok_button.value:
                # OK pressed: stop editing and capture on_ok()'s result.
                self.editing = False
                self.ok_button.value = False
                self.edit_return_value = self.on_ok()

    def resize(self):
        # NOTE(review): source indentation was lost; guarding the super() call
        # on self.action matches the original line order -- confirm upstream.
        if self.action:
            super(CustomFormMultiPage, self).resize()
            self.move_ok_button()

    def move_ok_button(self):
        # Re-anchor the OK button to the bottom-right corner after a resize.
        if self.action:
            if hasattr(self, "ok_button"):
                my, mx = self.curses_pad.getmaxyx()
                my -= self.__class__.OK_BUTTON_BR_OFFSET[0]
                mx -= (
                    len(self.__class__.OK_BUTTON_TEXT)
                    + self.__class__.OK_BUTTON_BR_OFFSET[1]
                )
                self.ok_button.relx = mx
                self.ok_button.rely = my

    def post_edit_loop(self):
        if self.action:
            # Tear down the OK button created in pre_edit_loop.
            self.switch_page(self._page_for_buttons)
            self.ok_button.destroy()
            del self._widgets__[self._ok_button_postion]
            del self.ok_button
            self.display()
        self.editing = False
        return self.edit_return_value
class CustomEditMenuPopup(fmActionFormV2.ActionFormV2):
    """Small fixed-position action popup whose only control button reads "Back"."""

    DEFAULT_LINES = 12
    DEFAULT_COLUMNS = 50
    SHOW_ATX = 10
    SHOW_ATY = 2
    OK_BUTTON_TEXT = "Back"

    def create_control_buttons(self):
        # Only an OK button (no cancel), anchored at the bottom-right corner
        # using the offsets defined on ActionFormV2.
        self._add_button(
            "ok_button",
            self.__class__.OKBUTTON_TYPE,
            self.__class__.OK_BUTTON_TEXT,
            0 - self.__class__.OK_BUTTON_BR_OFFSET[0],
            0
            - self.__class__.OK_BUTTON_BR_OFFSET[1]
            - len(self.__class__.OK_BUTTON_TEXT),
            None,
        )
class CustomAddDictEntryPopup(CustomEditMenuPopup):
    """Same geometry as CustomEditMenuPopup; the button reads "Add" instead."""

    OK_BUTTON_TEXT = "Add"
class CustomLoadPopup(ActionPopup):
    """ActionPopup with "Load" / "Back" buttons."""

    OK_BUTTON_TEXT = "Load"
    CANCEL_BUTTON_TEXT = "Back"
class CustomSavePopup(ActionPopup):
    """ActionPopup with "Save" / "Back" buttons."""

    OK_BUTTON_TEXT = "Save"
    CANCEL_BUTTON_TEXT = "Back"
class CustomCollectionButton(MiniButtonPress):
    """Button drawn at its label's exact width, flanked by "> <" when selected."""

    def __init__(self, screen, *args, **keywords):
        super(CustomCollectionButton, self).__init__(screen, *args, **keywords)
        self.color = "DEFAULT"
        # Fixed width: exactly wide enough for the label text.
        self.label_width = len(self.name)

    def calculate_area_needed(self):
        # One row high, label-width columns wide.
        return 1, self.label_width

    def update(self, clear=True):
        if clear:
            self.clear()
        if self.hidden:
            self.clear()
            return False
        # Draw "> label <" markers when the button's value is set (selected),
        # with theme colors when available.
        if self.value and self.do_colors():
            self.parent.curses_pad.addstr(
                self.rely, self.relx, ">", self.parent.theme_manager.findPair(self)
            )
            self.parent.curses_pad.addstr(
                self.rely,
                self.relx + self.width - 1,
                "<",
                self.parent.theme_manager.findPair(self),
            )
        elif self.value:
            self.parent.curses_pad.addstr(self.rely, self.relx, ">")
            self.parent.curses_pad.addstr(self.rely, self.relx + self.width - 1, "<")
        # Highlight the button while it has edit focus.
        if self.editing:
            button_state = curses.A_STANDOUT
        else:
            button_state = curses.A_NORMAL
        button_name = self.name
        if isinstance(button_name, bytes):
            button_name = button_name.decode(self.encoding, "replace")
        button_name = button_name.center(self.label_width)
        if self.do_colors():
            if self.cursor_color:
                if self.editing:
                    button_attributes = self.parent.theme_manager.findPair(
                        self, self.cursor_color
                    )
                else:
                    button_attributes = self.parent.theme_manager.findPair(
                        self, self.color
                    )
            else:
                button_attributes = (
                    self.parent.theme_manager.findPair(self, self.color) | button_state
                )
        else:
            button_attributes = button_state
        self.add_line(
            self.rely,
            self.relx,
            button_name,
            self.make_attributes_list(button_name, button_attributes),
            self.label_width,
        )
def custom_notify_wait(*args, **keywords):
    """Show an npyscreen notification for 1.5s, then flush any queued input.

    Arguments are forwarded unchanged to npyscreen.notify().
    """
    notify(*args, **keywords)
    curses.napms(1500)
    curses.flushinp()
# imports
import PySide.QtCore as __PySide_QtCore
import Shiboken as __Shiboken
class QHttp(__PySide_QtCore.QObject):
    """Auto-generated IDE skeleton for PySide.QtNetwork.QHttp.

    Method bodies are placeholders ("real signature unknown"); methods whose
    docstring reads "Signal" are Qt signals.  NOTE(review): the class-level
    enum attributes at the bottom reference a bare ``PySide`` name that this
    stub never imports -- typical of generated skeletons, not runnable code.
    """
    # no doc
    def abort(self, *args, **kwargs): # real signature unknown
        pass

    def authenticationRequired(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def bytesAvailable(self, *args, **kwargs): # real signature unknown
        pass

    def clearPendingRequests(self, *args, **kwargs): # real signature unknown
        pass

    def close(self, *args, **kwargs): # real signature unknown
        pass

    def currentDestinationDevice(self, *args, **kwargs): # real signature unknown
        pass

    def currentId(self, *args, **kwargs): # real signature unknown
        pass

    def currentRequest(self, *args, **kwargs): # real signature unknown
        pass

    def currentSourceDevice(self, *args, **kwargs): # real signature unknown
        pass

    def dataReadProgress(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def dataSendProgress(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def done(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def error(self, *args, **kwargs): # real signature unknown
        pass

    def errorString(self, *args, **kwargs): # real signature unknown
        pass

    def get(self, *args, **kwargs): # real signature unknown
        pass

    def hasPendingRequests(self, *args, **kwargs): # real signature unknown
        pass

    def head(self, *args, **kwargs): # real signature unknown
        pass

    def ignoreSslErrors(self, *args, **kwargs): # real signature unknown
        pass

    def lastResponse(self, *args, **kwargs): # real signature unknown
        pass

    def post(self, *args, **kwargs): # real signature unknown
        pass

    def proxyAuthenticationRequired(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def read(self, *args, **kwargs): # real signature unknown
        pass

    def readAll(self, *args, **kwargs): # real signature unknown
        pass

    def readyRead(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def request(self, *args, **kwargs): # real signature unknown
        pass

    def requestFinished(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def requestStarted(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def responseHeaderReceived(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def setHost(self, *args, **kwargs): # real signature unknown
        pass

    def setProxy(self, *args, **kwargs): # real signature unknown
        pass

    def setSocket(self, *args, **kwargs): # real signature unknown
        pass

    def setUser(self, *args, **kwargs): # real signature unknown
        pass

    def sslErrors(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def state(self, *args, **kwargs): # real signature unknown
        pass

    def stateChanged(self, *args, **kwargs): # real signature unknown
        """ Signal """
        pass

    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    @staticmethod # known case of __new__
    def __new__(S, *more): # real signature unknown; restored from __doc__
        """ T.__new__(S, ...) -> a new object with type S, a subtype of T """
        pass

    # Enum/state constants (see NOTE in the class docstring about `PySide`).
    Aborted = PySide.QtNetwork.QHttp.Error.Aborted
    AuthenticationRequiredError = PySide.QtNetwork.QHttp.Error.AuthenticationRequiredError
    Closing = PySide.QtNetwork.QHttp.State.Closing
    Connected = PySide.QtNetwork.QHttp.State.Connected
    Connecting = PySide.QtNetwork.QHttp.State.Connecting
    ConnectionMode = None # (!) real value is "<type 'PySide.QtNetwork.QHttp.ConnectionMode'>"
    ConnectionModeHttp = PySide.QtNetwork.QHttp.ConnectionMode.ConnectionModeHttp
    ConnectionModeHttps = PySide.QtNetwork.QHttp.ConnectionMode.ConnectionModeHttps
    ConnectionRefused = PySide.QtNetwork.QHttp.Error.ConnectionRefused
    Error = None # (!) real value is "<type 'PySide.QtNetwork.QHttp.Error'>"
    HostLookup = PySide.QtNetwork.QHttp.State.HostLookup
    HostNotFound = PySide.QtNetwork.QHttp.Error.HostNotFound
    InvalidResponseHeader = PySide.QtNetwork.QHttp.Error.InvalidResponseHeader
    NoError = PySide.QtNetwork.QHttp.Error.NoError
    ProxyAuthenticationRequiredError = PySide.QtNetwork.QHttp.Error.ProxyAuthenticationRequiredError
    Reading = PySide.QtNetwork.QHttp.State.Reading
    Sending = PySide.QtNetwork.QHttp.State.Sending
    State = None # (!) real value is "<type 'PySide.QtNetwork.QHttp.State'>"
    staticMetaObject = None # (!) real value is '<PySide.QtCore.QMetaObject object at 0x00000000038A6048>'
    Unconnected = PySide.QtNetwork.QHttp.State.Unconnected
    UnexpectedClose = PySide.QtNetwork.QHttp.Error.UnexpectedClose
    UnknownError = PySide.QtNetwork.QHttp.Error.UnknownError
WrongContentLength = PySide.QtNetwork.QHttp.Error.WrongContentLength | resources/dot_PyCharm/system/python_stubs/-762174762/PySide/QtNetwork/QHttp.py |
# imports
import PySide.QtCore as __PySide_QtCore
import Shiboken as __Shiboken
class QHttp(__PySide_QtCore.QObject):
# no doc
def abort(self, *args, **kwargs): # real signature unknown
pass
def authenticationRequired(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def bytesAvailable(self, *args, **kwargs): # real signature unknown
pass
def clearPendingRequests(self, *args, **kwargs): # real signature unknown
pass
def close(self, *args, **kwargs): # real signature unknown
pass
def currentDestinationDevice(self, *args, **kwargs): # real signature unknown
pass
def currentId(self, *args, **kwargs): # real signature unknown
pass
def currentRequest(self, *args, **kwargs): # real signature unknown
pass
def currentSourceDevice(self, *args, **kwargs): # real signature unknown
pass
def dataReadProgress(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def dataSendProgress(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def done(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def error(self, *args, **kwargs): # real signature unknown
pass
def errorString(self, *args, **kwargs): # real signature unknown
pass
def get(self, *args, **kwargs): # real signature unknown
pass
def hasPendingRequests(self, *args, **kwargs): # real signature unknown
pass
def head(self, *args, **kwargs): # real signature unknown
pass
def ignoreSslErrors(self, *args, **kwargs): # real signature unknown
pass
def lastResponse(self, *args, **kwargs): # real signature unknown
pass
def post(self, *args, **kwargs): # real signature unknown
pass
def proxyAuthenticationRequired(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def read(self, *args, **kwargs): # real signature unknown
pass
def readAll(self, *args, **kwargs): # real signature unknown
pass
def readyRead(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def request(self, *args, **kwargs): # real signature unknown
pass
def requestFinished(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def requestStarted(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def responseHeaderReceived(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def setHost(self, *args, **kwargs): # real signature unknown
pass
def setProxy(self, *args, **kwargs): # real signature unknown
pass
def setSocket(self, *args, **kwargs): # real signature unknown
pass
def setUser(self, *args, **kwargs): # real signature unknown
pass
def sslErrors(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def state(self, *args, **kwargs): # real signature unknown
pass
def stateChanged(self, *args, **kwargs): # real signature unknown
""" Signal """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
Aborted = PySide.QtNetwork.QHttp.Error.Aborted
AuthenticationRequiredError = PySide.QtNetwork.QHttp.Error.AuthenticationRequiredError
Closing = PySide.QtNetwork.QHttp.State.Closing
Connected = PySide.QtNetwork.QHttp.State.Connected
Connecting = PySide.QtNetwork.QHttp.State.Connecting
ConnectionMode = None # (!) real value is "<type 'PySide.QtNetwork.QHttp.ConnectionMode'>"
ConnectionModeHttp = PySide.QtNetwork.QHttp.ConnectionMode.ConnectionModeHttp
ConnectionModeHttps = PySide.QtNetwork.QHttp.ConnectionMode.ConnectionModeHttps
ConnectionRefused = PySide.QtNetwork.QHttp.Error.ConnectionRefused
Error = None # (!) real value is "<type 'PySide.QtNetwork.QHttp.Error'>"
HostLookup = PySide.QtNetwork.QHttp.State.HostLookup
HostNotFound = PySide.QtNetwork.QHttp.Error.HostNotFound
InvalidResponseHeader = PySide.QtNetwork.QHttp.Error.InvalidResponseHeader
NoError = PySide.QtNetwork.QHttp.Error.NoError
ProxyAuthenticationRequiredError = PySide.QtNetwork.QHttp.Error.ProxyAuthenticationRequiredError
Reading = PySide.QtNetwork.QHttp.State.Reading
Sending = PySide.QtNetwork.QHttp.State.Sending
State = None # (!) real value is "<type 'PySide.QtNetwork.QHttp.State'>"
staticMetaObject = None # (!) real value is '<PySide.QtCore.QMetaObject object at 0x00000000038A6048>'
Unconnected = PySide.QtNetwork.QHttp.State.Unconnected
UnexpectedClose = PySide.QtNetwork.QHttp.Error.UnexpectedClose
UnknownError = PySide.QtNetwork.QHttp.Error.UnknownError
WrongContentLength = PySide.QtNetwork.QHttp.Error.WrongContentLength | 0.555435 | 0.084266 |
import collections
# REST API Public Endpoints
class Constants:
EXCHANGE_NAME = "peatio"
DOMAIN_NAME ="www.coinharbour.com.au"
REST_API_VERSION = "v2"
# Eg. REST_URL = f"http://www.change.me/api/{REST_API_VERSION}/peatio"
# Note: must use https, not http, to POST data
REST_URL = f"https://{DOMAIN_NAME}/api/{REST_API_VERSION}/peatio"
# REST_URL_AUTH = "/api/2"
REST_URL_PRIVATE = f"{REST_URL}/private"
REST_URL_PUBLIC = f"{REST_URL}/public"
WS_API_VERSION = "v2"
# Eg. WS_URL = f"wss://www.change.me/api/{WS_API_VERSION}/ranger"
WS_URL = f"wss://{DOMAIN_NAME}/api/{WS_API_VERSION}/ranger"
WS_URL_PRIVATE = f"{WS_URL}/private"
WS_URL_PUBLIC = f"{WS_URL}/public"
# /api/v2/peatio/public/timestamp
TIME_URL = f"{WS_URL_PUBLIC}/timestamp"
MARKETS_URL = f"{REST_URL}/market"
# https://change.me/api/v2/peatio/market/orders
# TOKEN_URL = "https://accounts.probit.{}/token"
# /api/v2/peatio/public/markets/tickers
EXCHANGE_INFO_URL = f"{REST_URL_PUBLIC}/markets"
TICKER_PRICE_CHANGE_URL = f"{REST_URL_PUBLIC}/markets/tickers"
SINGLE_MARKET_DEPTH_URL = f"{REST_URL_PUBLIC}"+"/markets/{}/depth"
#DIFF_STREAM_URL = f"{WS_URL_PUBLIC}"
WSS_MY_TRADES = "SubscribeMyTrades"
WSS_ORDER_BOOK = "SubscribeOrderBook"
WSS_TRADES = "SubscribeTrades"
WSS_LOGIN = "Login"
OrderBookRow = collections.namedtuple("Book", ["price", "amount"])
ENDPOINT = {
# Public Endpoints
"TICKER": "public/ticker",
"TICKER_SINGLE": "public/ticker/{trading_pair}",
"SYMBOL": "public/symbol",
"ORDER_BOOK": "public/orderbook",
"ORDER_CREATE": "order",
"ORDER_DELETE": "order/{id}",
"ORDER_STATUS": "order/{id}",
"USER_ORDERS": "order",
"USER_BALANCES": "trading/balance",
}
# Order Status Defintions
ORDER_STATUS = [
'New',
'Partially Filled',
'Filled',
'Expired',
'Cancelled',
'Canceling',
'Processing',
'No Balance',
'No Fill'
]
WS_SUB = {
"TRADES": "Trades",
"ORDERS": "Orderbook",
"USER_ORDERS_TRADES": "Reports",
}
WS_METHODS = {
"ORDERS_SNAPSHOT": "snapshotOrderbook",
"ORDERS_UPDATE": "updateOrderbook",
"TRADES_SNAPSHOT": "snapshotTrades",
"TRADES_UPDATE": "updateTrades",
"USER_BALANCE": "getTradingBalance",
"USER_ORDERS": "activeOrders",
"USER_TRADES": "report",
}
# Timeouts
MESSAGE_TIMEOUT = 30.0
PING_TIMEOUT = 10.0
API_CALL_TIMEOUT = 5.0
API_MAX_RETRIES = 4
# Intervals
# Only used when nothing is received from WS
SHORT_POLL_INTERVAL = 5.0
# One minute should be fine since we get trades, orders and balances via WS
LONG_POLL_INTERVAL = 60.0
UPDATE_ORDER_STATUS_INTERVAL = 60.0
# 10 minute interval to update trading rules, these would likely never change whilst running.
INTERVAL_TRADING_RULES = 600
# Trading pair splitter regex
# TRADING_PAIR_SPLITTER = re.compile(r"^(\w+)(AUD|aud|USD|usd|BNB|bnb|USDT|usdt|NZD|nzd|BTC|btc|ETH|eth|BRL|brl|PAX|pax)$")
TRADING_PAIR_SPLITTER = "AUD|aud|USD|usd|BNB|bnb|USDT|usdt|NZD|nzd|BTC|btc|ETH|eth"
# REST API Public Endpoints
GET_TRADING_PAIRS = "/Public/GetPairs"
GET_TRADING_PAIRS_STATS = "/Public/GetPairStats"
GET_MARKET = "/public/markets"
GET_ORDER_BOOK = "/Public/GetOrderBook"
GET_PUBLIC_TRADE_HISTORY = "/Public/GetTradeHistory"
# https://change.me/api/v2/peatio/account/balances
GET_BALANCES = "/account/balances"
# GET_ORDERS = f"{REST_API_VERSION}/Private/GetOrders"
GET_ORDERS = "/market/orders"
GET_DETAILED_BALANCES = "/Private/GetDetailedBalances"
GET_OPEN_ORDERS = "/Private/GetOpenOrders"
GET_PRIVATE_TRADE_HISTORY = "/Private/GetTradeHistory"
PLACE_ORDER = "/Private/PlaceOrders"
MOVE_ORDER = "/Private/MoveOrders"
CANCEL_ORDER = "/Private/CancelOrder"
CANCEL_ALL_ORDERS = "/Private/CancelAllOrders"
# Openware examples
# From https://www.openware.com/sdk/2.6/api/peatio/trading-api.html
# Public API End Points
# https://change.me/api/v2/peatio/public/markets
# /api/v2/admin/peatio/blockchains
# /api/v2/admin/peatio/blockchains/clients
# /api/v2/admin/peatio/blockchains/process_block
# /api/v2/admin/peatio/blockchains/update
# /api/v2/admin/peatio/blockchains/{id}/latest_block
# /api/v2/peatio/public/health/alive
# /api/v2/peatio/public/timestamp
# /api/v2/peatio/public/trading_fees
# /api/v2/peatio/public/version
# /api/v2/peatio/public/webhooks/{event}
# /api/v2/peatio/public/withdraw_limits
# /api/v2/peatio/public/markets
# /api/v2/peatio/public/markets/tickers
# /api/v2/peatio/public/markets/{market}/depth
# /api/v2/peatio/public/markets/{market}/tickers
# /api/v2/peatio/public/markets/{market}/trades
# /api/v2/peatio/public/markets/{market}/order-book
# /api/v2/peatio/public/currencies
# /api/v2/peatio/public/currencies/{id}
# Private API End Points
# https://change.me/api/v2/peatio/account/balances
# https://change.me/api/v2/peatio/market/orders
# https://change.me/api/v2/peatio/market/trade
# /api/v2/peatio/account/stats/pnl
# /api/v2/peatio/account/transactions
# /api/v2/peatio/market/orders
# /api/v2/peatio/market/orders/{id}
# /api/v2/peatio/market/orders/cancel
# For testing:
# http://www.coinharbour.com.au/api/v2/peatio/public/markets
# http://www.coinharbour.com.au/api/v2/peatio/public/markets/tickers
# http://www.coinharbour.com.au/api/v2/peatio/account/balances
# http://www.coinharbour.com.au/api/v2/peatio/market/orders | hummingbot/connector/exchange/peatio/peatio_constants.py |
import collections
# REST API Public Endpoints
class Constants:
EXCHANGE_NAME = "peatio"
DOMAIN_NAME ="www.coinharbour.com.au"
REST_API_VERSION = "v2"
# Eg. REST_URL = f"http://www.change.me/api/{REST_API_VERSION}/peatio"
# Note: must use https, not http, to POST data
REST_URL = f"https://{DOMAIN_NAME}/api/{REST_API_VERSION}/peatio"
# REST_URL_AUTH = "/api/2"
REST_URL_PRIVATE = f"{REST_URL}/private"
REST_URL_PUBLIC = f"{REST_URL}/public"
WS_API_VERSION = "v2"
# Eg. WS_URL = f"wss://www.change.me/api/{WS_API_VERSION}/ranger"
WS_URL = f"wss://{DOMAIN_NAME}/api/{WS_API_VERSION}/ranger"
WS_URL_PRIVATE = f"{WS_URL}/private"
WS_URL_PUBLIC = f"{WS_URL}/public"
# /api/v2/peatio/public/timestamp
TIME_URL = f"{WS_URL_PUBLIC}/timestamp"
MARKETS_URL = f"{REST_URL}/market"
# https://change.me/api/v2/peatio/market/orders
# TOKEN_URL = "https://accounts.probit.{}/token"
# /api/v2/peatio/public/markets/tickers
EXCHANGE_INFO_URL = f"{REST_URL_PUBLIC}/markets"
TICKER_PRICE_CHANGE_URL = f"{REST_URL_PUBLIC}/markets/tickers"
SINGLE_MARKET_DEPTH_URL = f"{REST_URL_PUBLIC}"+"/markets/{}/depth"
#DIFF_STREAM_URL = f"{WS_URL_PUBLIC}"
WSS_MY_TRADES = "SubscribeMyTrades"
WSS_ORDER_BOOK = "SubscribeOrderBook"
WSS_TRADES = "SubscribeTrades"
WSS_LOGIN = "Login"
OrderBookRow = collections.namedtuple("Book", ["price", "amount"])
ENDPOINT = {
# Public Endpoints
"TICKER": "public/ticker",
"TICKER_SINGLE": "public/ticker/{trading_pair}",
"SYMBOL": "public/symbol",
"ORDER_BOOK": "public/orderbook",
"ORDER_CREATE": "order",
"ORDER_DELETE": "order/{id}",
"ORDER_STATUS": "order/{id}",
"USER_ORDERS": "order",
"USER_BALANCES": "trading/balance",
}
# Order Status Defintions
ORDER_STATUS = [
'New',
'Partially Filled',
'Filled',
'Expired',
'Cancelled',
'Canceling',
'Processing',
'No Balance',
'No Fill'
]
WS_SUB = {
"TRADES": "Trades",
"ORDERS": "Orderbook",
"USER_ORDERS_TRADES": "Reports",
}
WS_METHODS = {
"ORDERS_SNAPSHOT": "snapshotOrderbook",
"ORDERS_UPDATE": "updateOrderbook",
"TRADES_SNAPSHOT": "snapshotTrades",
"TRADES_UPDATE": "updateTrades",
"USER_BALANCE": "getTradingBalance",
"USER_ORDERS": "activeOrders",
"USER_TRADES": "report",
}
# Timeouts
MESSAGE_TIMEOUT = 30.0
PING_TIMEOUT = 10.0
API_CALL_TIMEOUT = 5.0
API_MAX_RETRIES = 4
# Intervals
# Only used when nothing is received from WS
SHORT_POLL_INTERVAL = 5.0
# One minute should be fine since we get trades, orders and balances via WS
LONG_POLL_INTERVAL = 60.0
UPDATE_ORDER_STATUS_INTERVAL = 60.0
# 10 minute interval to update trading rules, these would likely never change whilst running.
INTERVAL_TRADING_RULES = 600
# Trading pair splitter regex
# TRADING_PAIR_SPLITTER = re.compile(r"^(\w+)(AUD|aud|USD|usd|BNB|bnb|USDT|usdt|NZD|nzd|BTC|btc|ETH|eth|BRL|brl|PAX|pax)$")
TRADING_PAIR_SPLITTER = "AUD|aud|USD|usd|BNB|bnb|USDT|usdt|NZD|nzd|BTC|btc|ETH|eth"
# REST API Public Endpoints
GET_TRADING_PAIRS = "/Public/GetPairs"
GET_TRADING_PAIRS_STATS = "/Public/GetPairStats"
GET_MARKET = "/public/markets"
GET_ORDER_BOOK = "/Public/GetOrderBook"
GET_PUBLIC_TRADE_HISTORY = "/Public/GetTradeHistory"
# https://change.me/api/v2/peatio/account/balances
GET_BALANCES = "/account/balances"
# GET_ORDERS = f"{REST_API_VERSION}/Private/GetOrders"
GET_ORDERS = "/market/orders"
GET_DETAILED_BALANCES = "/Private/GetDetailedBalances"
GET_OPEN_ORDERS = "/Private/GetOpenOrders"
GET_PRIVATE_TRADE_HISTORY = "/Private/GetTradeHistory"
PLACE_ORDER = "/Private/PlaceOrders"
MOVE_ORDER = "/Private/MoveOrders"
CANCEL_ORDER = "/Private/CancelOrder"
CANCEL_ALL_ORDERS = "/Private/CancelAllOrders"
# Openware examples
# From https://www.openware.com/sdk/2.6/api/peatio/trading-api.html
# Public API End Points
# https://change.me/api/v2/peatio/public/markets
# /api/v2/admin/peatio/blockchains
# /api/v2/admin/peatio/blockchains/clients
# /api/v2/admin/peatio/blockchains/process_block
# /api/v2/admin/peatio/blockchains/update
# /api/v2/admin/peatio/blockchains/{id}/latest_block
# /api/v2/peatio/public/health/alive
# /api/v2/peatio/public/timestamp
# /api/v2/peatio/public/trading_fees
# /api/v2/peatio/public/version
# /api/v2/peatio/public/webhooks/{event}
# /api/v2/peatio/public/withdraw_limits
# /api/v2/peatio/public/markets
# /api/v2/peatio/public/markets/tickers
# /api/v2/peatio/public/markets/{market}/depth
# /api/v2/peatio/public/markets/{market}/tickers
# /api/v2/peatio/public/markets/{market}/trades
# /api/v2/peatio/public/markets/{market}/order-book
# /api/v2/peatio/public/currencies
# /api/v2/peatio/public/currencies/{id}
# Private API End Points
# https://change.me/api/v2/peatio/account/balances
# https://change.me/api/v2/peatio/market/orders
# https://change.me/api/v2/peatio/market/trade
# /api/v2/peatio/account/stats/pnl
# /api/v2/peatio/account/transactions
# /api/v2/peatio/market/orders
# /api/v2/peatio/market/orders/{id}
# /api/v2/peatio/market/orders/cancel
# For testing:
# http://www.coinharbour.com.au/api/v2/peatio/public/markets
# http://www.coinharbour.com.au/api/v2/peatio/public/markets/tickers
# http://www.coinharbour.com.au/api/v2/peatio/account/balances
# http://www.coinharbour.com.au/api/v2/peatio/market/orders | 0.38769 | 0.097605 |
from argparse import ArgumentParser
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Optional
import sys
import numpy as np
import numpy.linalg as npl
from qulacs import QuantumCircuit, QuantumState
from qulacs.gate import DenseMatrix, CPTP
from qulacs.state import partial_trace
def renyi(a: float, m) -> float:
assert a >= 0
if a == 1:
return -sum(x * np.log2(x) for x in npl.eigvalsh(m) if x > 0)
else:
return np.log2(np.real(np.trace(npl.matrix_power(m, a)))) / (1 - a)
class EntanglementType(Enum):
from enum import auto
TRIPARTITE = auto()
ENTANGLEMENT_ENTROPY = auto()
def simulate(
n: int,
d: int,
p: float,
a: int,
measure_for: Optional[list[int]] = None,
entanglement_type: EntanglementType = EntanglementType.TRIPARTITE,
entanglement_entropy_for: Optional[list[int]] = None,
) -> float:
assert 0 <= p <= 1
if measure_for is None:
measure_for = list(range(n))
assert all(0 <= i < n for i in measure_for)
if entanglement_type == EntanglementType.TRIPARTITE:
assert entanglement_entropy_for is None
if entanglement_type == EntanglementType.ENTANGLEMENT_ENTROPY:
assert entanglement_entropy_for is not None
circuit = QuantumCircuit(n)
for k in range(d):
for j in range(n // 2):
i = j * 2 + k % 2
circuit.add_random_unitary_gate([i, (i + 1) % n])
for i in measure_for:
sp, sq = np.sqrt(p), np.sqrt(1 - p)
circuit.add_gate(
CPTP(
[
DenseMatrix(i, matrix)
for matrix in [
[[sq, 0], [0, sq]],
[[sp, 0], [0, 0]],
[[0, 0], [0, sp]],
]
]
)
)
state = QuantumState(n)
state.set_zero_state()
circuit.update_quantum_state(state)
if entanglement_type == EntanglementType.TRIPARTITE:
ms = [n * i // 4 for i in range(5)]
ranges = [list(range(ms[i], ms[i + 1])) for i in range(4)]
ret = 0
for i in range(1, 8):
trace_range = [
x for j, r in enumerate(ranges) for x in r if not 1 << j & i << 1
]
coef = bin(i).count("1") % 2 * 2 - 1
entropy = renyi(a, partial_trace(state, trace_range).get_matrix())
ret += coef * entropy
# print(coef, trace_range, entropy)
return ret
elif entanglement_type == EntanglementType.ENTANGLEMENT_ENTROPY:
return renyi(2, partial_trace(state, entanglement_entropy_for).get_matrix())
else:
raise RuntimeError(f"Unsupported entanglement type: {entanglement_type}")
def main():
parser = ArgumentParser()
parser.add_argument("n", type=int)
parser.add_argument("d", type=int)
parser.add_argument("r", type=int)
parser.add_argument("a", type=int)
parser.add_argument("ps", type=str, help="space separated floats")
parser.add_argument("--entanglement-entropy", type=str, help="space separated ints")
parser.add_argument("--measure-for", type=str, help="space separated ints")
parser.add_argument("--output", type=Path)
args = parser.parse_args()
n, d, r, a = args.n, args.d, args.r, args.a
ps = list(map(float, args.ps.split()))
measure_for = None
if args.measure_for is not None:
measure_for = list(map(int, args.measure_for.split()))
entanglement_entropy_args = {}
if args.entanglement_entropy is not None:
entanglement_entropy_args = {
"entanglement_type": EntanglementType.ENTANGLEMENT_ENTROPY,
"entanglement_entropy_for": list(
map(int, args.entanglement_entropy.split())
),
}
output_dir = Path(__file__).parent / "output"
output = (
output_dir
/ f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_pt_n{n:02d}_d{d:02d}_a{a}_r{r:02d}"
)
with open("/dev/null" if args.output else output.with_suffix(".txt"), "w") as f:
f.write(" ".join(map(repr, sys.argv)))
with open(args.output or output.with_suffix(".tsv"), "w") as f:
for p in ps:
ds = [
simulate(n, d, p, a, measure_for, **entanglement_entropy_args)
for _ in range(r)
]
av, std = np.average(ds), np.std(ds)
print(f"{datetime.now()}\t{p=}\t{av}±{std}")
f.write("\t".join(map(str, [p, av, std])) + "\n")
if __name__ == "__main__":
main() | phase_transition.py | from argparse import ArgumentParser
from datetime import datetime
from enum import Enum
from pathlib import Path
from typing import Optional
import sys
import numpy as np
import numpy.linalg as npl
from qulacs import QuantumCircuit, QuantumState
from qulacs.gate import DenseMatrix, CPTP
from qulacs.state import partial_trace
def renyi(a: float, m) -> float:
assert a >= 0
if a == 1:
return -sum(x * np.log2(x) for x in npl.eigvalsh(m) if x > 0)
else:
return np.log2(np.real(np.trace(npl.matrix_power(m, a)))) / (1 - a)
class EntanglementType(Enum):
from enum import auto
TRIPARTITE = auto()
ENTANGLEMENT_ENTROPY = auto()
def simulate(
n: int,
d: int,
p: float,
a: int,
measure_for: Optional[list[int]] = None,
entanglement_type: EntanglementType = EntanglementType.TRIPARTITE,
entanglement_entropy_for: Optional[list[int]] = None,
) -> float:
assert 0 <= p <= 1
if measure_for is None:
measure_for = list(range(n))
assert all(0 <= i < n for i in measure_for)
if entanglement_type == EntanglementType.TRIPARTITE:
assert entanglement_entropy_for is None
if entanglement_type == EntanglementType.ENTANGLEMENT_ENTROPY:
assert entanglement_entropy_for is not None
circuit = QuantumCircuit(n)
for k in range(d):
for j in range(n // 2):
i = j * 2 + k % 2
circuit.add_random_unitary_gate([i, (i + 1) % n])
for i in measure_for:
sp, sq = np.sqrt(p), np.sqrt(1 - p)
circuit.add_gate(
CPTP(
[
DenseMatrix(i, matrix)
for matrix in [
[[sq, 0], [0, sq]],
[[sp, 0], [0, 0]],
[[0, 0], [0, sp]],
]
]
)
)
state = QuantumState(n)
state.set_zero_state()
circuit.update_quantum_state(state)
if entanglement_type == EntanglementType.TRIPARTITE:
ms = [n * i // 4 for i in range(5)]
ranges = [list(range(ms[i], ms[i + 1])) for i in range(4)]
ret = 0
for i in range(1, 8):
trace_range = [
x for j, r in enumerate(ranges) for x in r if not 1 << j & i << 1
]
coef = bin(i).count("1") % 2 * 2 - 1
entropy = renyi(a, partial_trace(state, trace_range).get_matrix())
ret += coef * entropy
# print(coef, trace_range, entropy)
return ret
elif entanglement_type == EntanglementType.ENTANGLEMENT_ENTROPY:
return renyi(2, partial_trace(state, entanglement_entropy_for).get_matrix())
else:
raise RuntimeError(f"Unsupported entanglement type: {entanglement_type}")
def main():
parser = ArgumentParser()
parser.add_argument("n", type=int)
parser.add_argument("d", type=int)
parser.add_argument("r", type=int)
parser.add_argument("a", type=int)
parser.add_argument("ps", type=str, help="space separated floats")
parser.add_argument("--entanglement-entropy", type=str, help="space separated ints")
parser.add_argument("--measure-for", type=str, help="space separated ints")
parser.add_argument("--output", type=Path)
args = parser.parse_args()
n, d, r, a = args.n, args.d, args.r, args.a
ps = list(map(float, args.ps.split()))
measure_for = None
if args.measure_for is not None:
measure_for = list(map(int, args.measure_for.split()))
entanglement_entropy_args = {}
if args.entanglement_entropy is not None:
entanglement_entropy_args = {
"entanglement_type": EntanglementType.ENTANGLEMENT_ENTROPY,
"entanglement_entropy_for": list(
map(int, args.entanglement_entropy.split())
),
}
output_dir = Path(__file__).parent / "output"
output = (
output_dir
/ f"{datetime.now().strftime('%Y%m%d_%H%M%S')}_pt_n{n:02d}_d{d:02d}_a{a}_r{r:02d}"
)
with open("/dev/null" if args.output else output.with_suffix(".txt"), "w") as f:
f.write(" ".join(map(repr, sys.argv)))
with open(args.output or output.with_suffix(".tsv"), "w") as f:
for p in ps:
ds = [
simulate(n, d, p, a, measure_for, **entanglement_entropy_args)
for _ in range(r)
]
av, std = np.average(ds), np.std(ds)
print(f"{datetime.now()}\t{p=}\t{av}±{std}")
f.write("\t".join(map(str, [p, av, std])) + "\n")
if __name__ == "__main__":
main() | 0.581303 | 0.578508 |
import pandas as pd
pd.set_option('display.max_columns', None)
import altair as alt
import streamlit as st
alt.data_transformers.disable_max_rows()
#IMPORTING THE DATA
waterdf = pd.read_csv("https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/master/water.csv", on_bad_lines='skip', encoding = "ISO-8859-1")
sanitdf = pd.read_csv("https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/master/sanitation.csv", on_bad_lines='skip', encoding = "ISO-8859-1")
#INSPECTING THE DATA AND CLEANING OF DATA
#DATA NO.1 : WATER
print(waterdf.shape)
print(waterdf.describe())
print(waterdf.isna().sum())
waterdf = waterdf.dropna(subset=['POP_THOUS'])
print(waterdf.shape)
waterdf.isnull().sum()
waterdf['POP_THOUS'] = waterdf['POP_THOUS'].str.replace(' ', '')
waterdf['POP_THOUS'] = waterdf['POP_THOUS'].astype(int)
waterdf['YEAR_STR'] = waterdf['YEAR'].astype(str)
waterdf['YEAR'] = waterdf['YEAR'].astype(float)
print(waterdf.head())
print(waterdf.describe())
waterpie = waterdf[['COUNTRY','YEAR','YEAR_STR','BASIC_WAT_NAT','LIMITED_WAT_NAT','UNIMPROVED_WAT_NAT','SURFACE_WAT_NAT']]
waterpie_melt = pd.melt(waterpie, id_vars=['COUNTRY','YEAR','YEAR_STR'], value_vars=['BASIC_WAT_NAT','LIMITED_WAT_NAT','UNIMPROVED_WAT_NAT','SURFACE_WAT_NAT'])
#DATA NO.2 : SANITATION
print(sanitdf.shape)
print(sanitdf.describe())
print(sanitdf.isnull().sum())
sanitdf = sanitdf.dropna(subset=['POP_THOUS'])
print(sanitdf.shape)
sanitdf.isnull().sum()
sanitdf['POP_THOUS'] = sanitdf['POP_THOUS'].str.replace(' ', '')
sanitdf['POP_THOUS'] = sanitdf['POP_THOUS'].astype(int)
sanitdf['YEAR'] = sanitdf['YEAR'].astype(float)
sanitdf.head()
sanitpie = sanitdf[['COUNTRY','YEAR','BASIC_SAN_NAT','LIMITED_SHARED_SAN_NAT','UNIMPROVED_SAN_NAT','OPENDEFECATION_SAN_NAT']]
sanitpie_melt = pd.melt(sanitpie, id_vars=['COUNTRY','YEAR'], value_vars=['BASIC_SAN_NAT','LIMITED_SHARED_SAN_NAT','UNIMPROVED_SAN_NAT','OPENDEFECATION_SAN_NAT'])
##TITLE AND INTRO
st.title("UN SDG 6: Clean Water and Sanitation")
st.subheader("An Exploratory Visualization Application to Find Key Insights")
st.image("https://blantyre.dorium.community/uploads/default/optimized/1X/6fc93ea6f54ff0312e52bf977c07f91e35efdf40_2_1035x322.jpeg")
st.write("United Nations has gloabally designed several Sustainable Developement Goals(SDGs) as actions to end poverty, protect the planet and ensure peace and prosperity for human beings. SDGs are the extensions of Millenium Developement Goals(MDGs), which were started in the year 2000 to serve the same purpose. SDG-6 is to ensure availability and safe and sustainable management of water and sanitation for all. This project analyzes overall developement of countries around the world, towards safely managing drinking water and sanitation.")
##WORLD POPULATION SLIDER
st.header("1. Growth in World Population over Time")
st.image('https://unstats.un.org/sdgs/assets/img/sliders/2017-Regions-E-large.png')
st.write("The United Nations categorized the world nations in Eight Major Regions, viz.,",
"'Sub-Saharan Africa', 'Northern & Western Africa', 'Central & Southern Asia', 'Eastern & South-Eastern Asia'",
", 'Latin America & the Caribbean', 'Australia & New-Zealand','Oceania', and 'Europe & Northern America'.")
slider1 = alt.binding_range(min=2000, max=2020, step=1, name='Select year:')
select_year1 = alt.selection_single(name="YEAR", fields=['YEAR'],
bind=slider1, init={'YEAR': 2000})
popsdgchart = alt.Chart(waterdf).mark_bar(tooltip=True).encode(
y = alt.Y('POP_THOUS',
axis=alt.Axis(title='Population (in 1000s)'), sort='-x',
scale=alt.Scale(domain=(0, 2400000))),
x = alt.X('SDG region:O',
axis=alt.Axis(title='SDG Regions'),
scale=alt.Scale(zero=False), sort='y'
),
color= alt.Color('COUNTRY:O', legend = None, scale=alt.Scale(scheme='plasma'))
).properties(
width = 300,
height = 300,
title="Population (2000-2020): SDG Regions"
).transform_filter(
select_year1
).add_selection(
select_year1
)
popyearchart = alt.Chart(waterdf).mark_bar(tooltip=True).encode(
y = alt.Y('POP_THOUS',
axis=alt.Axis(title='Population (in 1000s)'), sort='-x',
scale=alt.Scale(domain=(0, 1600000))),
x = alt.X('COUNTRY:O',
axis=alt.Axis(title='Countries'),
scale=alt.Scale(zero=False), sort='-y'
),
color= alt.Color('COUNTRY', legend = None, scale=alt.Scale(scheme='plasma'))
).transform_filter(
select_year1
).add_selection(
select_year1
).transform_filter(
alt.datum.POP_THOUS > 40000
).properties(
width = 400,
height = 300,
title="Population (2000-2020): World Nations"
)
popgrowth= alt.concat(
popsdgchart, popyearchart
).resolve_scale(
color='independent'
).configure_view(
stroke=None
)
st.altair_chart(popgrowth, use_container_width=True)
st.caption("Growth in World's Population over Time (2000-2020) (Interactive)")
st.write("**Interactivity Guide:** Move the slider, hover on the bars to view more details...")
st.write("The world population grew exponentially from around 6 Billion in 2000 to about 8 Billion by 2020! This steep rise in population put great stress on the world economies to ensuring clean potable drinking water and safe sanitation to each and every human being on the planet. Population is an important and consistently growing parameter on which, developement of any nation largely depends. This section shows a pair of histograms depicting population growth in different countries and different SDG Regions in the the world between from the year 2000 to 2020. ")
st.subheader("***🔑 Key Insight***")
st.write("*Notice the steep 30% increase in India's population. Compare it with China's and USA's population over the past 20 years!*")
## PART A - CLEAN WATER
st.header("2. Drinking Water")
## THE WATER CORRELATION MATRIX
st.write("The data obtained has 10 different parameters [Link to Variable Dictionary](https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/f367084a4fef6684455252465e3bd7f6e9ae9a67/Dictionary%20-%20water.csv). To visualize the correlation (connection) between these parameters, a correlation matrix is plotted. Many parameters show strong correlation among themselves.")
# THE MATRIX
cor_data = (waterdf[['BASIC_WAT_NAT', 'LIMITED_WAT_NAT', 'UNIMPROVED_WAT_NAT', 'SURFACE_WAT_NAT', 'SAFELY_MANAGED_NAT', 'ACCESS_ONPREMISE_NAT', 'AVAIL_WHEN_NEEDED_NAT',
'NON_CONTAMIN_NAT', 'PIPED_NAT', 'NONPIPED_NAT']]
).corr().stack().reset_index().rename(columns={0: 'correlation', 'level_0': 'variable1', 'level_1': 'variable2'})
cor_data['correlation_label'] = cor_data['correlation'].map('{:.2f}'.format) # Round to 2 decimal
base = alt.Chart(cor_data).encode(
x='variable2:O',
y='variable1:O'
)
text = base.mark_text().encode(
text='correlation_label',
color=alt.condition(
alt.datum.correlation > 0.1,
alt.value('black'),
alt.value('white')
)
)
## THE HEATMAP
cor_plot = base.mark_rect().encode(
color=alt.Color('correlation:Q', scale=alt.Scale(scheme='plasma'))
).properties(
width=700,
height=500,
title="The Correlation Matrix: Drinking Water"
)
st.altair_chart((cor_plot + text))
st.caption("Correlation Matrix for Water Feature Data")
st.write("The SDG is to ensure clean drinking water, hence the most important parameter is 'Non Contaminated Water', which shows significantly high (80%) correlation with the 'Piped Water'. This indicates that as the piped water networks increase, the delivery of non-contaminated water increases.")
## CLASSIFICATION OF DRINKING WATER INFRASTRUCTURE/ METHODS
st.header("2.1. Classification of Drinking Water Infrastructure/ Methods")
st.write("From ancient ground/surface water withdrawl to modern pipe networks, methods of access to drinking water are developing continuously. The scatter plot in this section shows increase in population of different countries having access to safe/purified piped water through 20 years. The different dot sizes depict population of a country. ")
# Click-selection on (year, country); drives both the highlight on the
# scatter and the filter feeding the pie chart below.
selection = alt.selection_single(fields=['YEAR_STR','COUNTRY'])
pipedwaterchart = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x=alt.X('YEAR_STR:O', axis=alt.Axis(title='Year')),
y=alt.Y('PIPED_NAT', axis=alt.Axis(title='% Population with Piped Water Connections')),
size='POP_THOUS',
#shape='SDG region',
color = alt.Color('COUNTRY', scale=alt.Scale(scheme='plasma')),
tooltip='COUNTRY'
).add_selection(selection).encode(
# Second encode() overrides color: selected country keeps its hue,
# everything else is greyed out.
color=alt.condition(selection, "COUNTRY", alt.value("grey"), legend=None, scale=alt.Scale(scheme='plasma'))
).properties(
title="Increase in Access to Piped Water Connections over Time",
width=400
)
# Pie of mean access-type shares for the selected country/year
# (waterpie_melt is the long-form melt of the four access-type columns).
nationpie = alt.Chart(waterpie_melt).mark_arc().encode(
theta=alt.Theta(field='mean_value', type="quantitative"),
color=alt.Color('variable', scale=alt.Scale(scheme='plasma')),
tooltip=('variable:O', 'mean_value:Q')
).transform_filter(
selection
).transform_aggregate(
mean_value='mean(value):Q',
groupby=["variable"]
).properties(
title="Access to Drinking Water"
)
chart_pie = alt.hconcat(
pipedwaterchart , nationpie
).resolve_scale(
color='independent'
).configure_view(
stroke=None
)
st.altair_chart(chart_pie)
st.caption("Increase in Access to Piped Drinking Water (left) and Type of Access to Drinking Water (right) (Interactive)")
st.write("**Interactivity Guide:** Hover/ Click the 'Country' beads to see the pie change adaptively for the selected Country and Year. To deselect click on whitespace...")
st.write("As we hover over the graph, the tooltip (cursor) shows name of the country of a particular data point. Single Selection which acts as a dynamic query filter, enables user to click on any point and disaply its details on-demand in the form of a pie chart, alongside. The pie chart shows the accessability to Basic, Limited, Unimproved or Surface water in each country. This gives overall idea of the country's water infrastructure.")
st.subheader("***🔑 Key Insight***")
st.write("*Notice how China enhances delivery of drinking water to 80% of its people with Piped Water Connections in 2020 from a 50% in 2000. India clearly needs to improve its delivery through piped water connectivity. This is a clear indication why the Indian Government started heavily investing in schemes like 'Jal Jeevan Mission' (https://jaljeevanmission.gov.in/) that envisions to provide safe and adequate drinking water through individual household tap connections by 2024 to all households in rural India.*")
## PERFORMANCE OF COUNTRIES IN DELIVERING NONCONTAMINATED DRINKING WATER
st.header("2.2. Performance by Nations in Delivering Non-contaminated, Safe Drinking Water to its Citizens")
st.write("As the goal of the SDG is to provide clean/safe drinking water to all, the scatter plots are created to show World Population vs. Safely Managed Water, Non-Contaminated vs. Non-Piped Water, and Non-Contaminated vs. Piped Water. The different dot sizes depict population of a country. The Slider of Years, help dynamically compare the progress of different nations over the time. ")
# One year-slider selection shared by all four charts in this section so
# they filter in lock-step.
slider2 = alt.binding_range(min=2000, max=2020, step=1, name='Select year:')
select_year2 = alt.selection_single(name="YEAR", fields=['YEAR'],
bind=slider2, init={'YEAR': 2000})
## NSCM - Non Contaminated VS Safely Managed
NCSM = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x = alt.X('SAFELY_MANAGED_NAT'),
y = alt.Y('NON_CONTAMIN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
title="Safely Managed Non Contaminated Drinking Water",
width = 500,
height = 250
)
## NCNP Non Contaminated VS NON Piped
NCNP = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x = alt.X('NONPIPED_NAT'),
y = alt.Y('NON_CONTAMIN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
title="Non Piped Access to Non Contaminated Drinking Water",
width = 250,
height = 250,
)
## NCAWN Non Contaminated VS Availability When Needed
NCP = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x = alt.X('AVAIL_WHEN_NEEDED_NAT',),
y = alt.Y('NON_CONTAMIN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
title="Availability of Non Contaminated Drinking Water When Needed",
width = 250,
height = 250,
)
# Total world population bar for the selected year (axis title states
# units of thousands).
worldpop = alt.Chart(waterdf).mark_bar().encode(
x="YEAR:O",
y=alt.Y("sum(POP_THOUS):Q",scale=alt.Scale(domain=(0,8000000)),axis=alt.Axis(title='World Population (in 1000s)')),
color=alt.Color('YEAR:N', scale=alt.Scale(scheme='plasma', zero=False)),
tooltip = 'YEAR'
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
height= 250)
# 2x2 layout: (worldpop | NCSM) on top, (NCNP | NCP) below.
st.write(alt.concat(
(worldpop | NCSM)& (NCNP | NCP)
).resolve_scale(
color='shared'
).configure_view(
stroke=None
)
)
st.caption("Performance by Nations in Delivering Safely Managed Drinking Water to its Citizens(Interactive)")
st.write("**Interactivity Guide:** Move the slider to and fro to visualize. Hover on the circles to identify the country.")
st.write("For most of the countries, the parameters in all the three graphs show clear relation. Non-Contaminated water increases as the Safe management of water increases. Non-piped water increases/decreases, Non-contaminated water decreases/increases. Non-contaminated water increases as Pipe water increases.")
st.subheader("***🔑 Key Insight***")
st.write("*While most of the countries in the World are improving their water infrastructure systems, these charts help us identify the countries with poor development or the ones that need drastic positive changes. Notice Pakistan (near (x=40,y=40)) moving in opposite direction as compared to the rest of world indicating it has failed to provide any improvement in delivering non-contaminated safely managed clean drinking water to its citizens. The lower left chart shows Pakistan, Nigeria, and Ethiopia witnessed increase in proportion of its people having non-piped access to fairly contaminated drinking water. The lower right chart shows that Ethiopia and Nigeria ensured improvement in availability of water when its needed to its citizens but the quality of water fairly contaminated, whereas Pakistan couldn't ensure any development in both the parameters.*")
#########################################################################################################################
## PART B - SANITATION
st.header("3. Sanitation")
## THE SANITATION CORRELATION MATRIX
st.write("Sanitatary waste-water systems have been a tremendously neglected infrastructure, especially in the developing and under-developed countries. The data obtained has 11 different parameters [Link to Variable Dictionary](https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/f367084a4fef6684455252465e3bd7f6e9ae9a67/Dictionary%20-%20sanitary.csv). To visualize the correlation (connection) between these parameters, a correlation matrix is plotted. A couple of parameters show strong correlation among themselves.")
# Same long-form corr-matrix construction as the drinking-water section,
# applied to the 11 sanitation indicators.
sanit_cor_data = (sanitdf[['BASIC_SAN_NAT', 'LIMITED_SHARED_SAN_NAT', 'UNIMPROVED_SAN_NAT', 'OPENDEFECATION_SAN_NAT', 'SAFELYMANAGED_SAN_NAT', 'DISPOSED_INSITU_SAN_NAT', 'EMP_TREAT_SAN_NAT',
'WW_TREATED_SAN_NAT', 'LATRINES_SAN_NAT', 'SEPTICTANKS_SAN_NAT', 'SEWERCONNECTION_SAN_NAT']]
).corr().stack().reset_index().rename(columns={0: 'correlation', 'level_0': 'variable1', 'level_1': 'variable2'})
sanit_cor_data['correlation_label'] = sanit_cor_data['correlation'].map('{:.2f}'.format) # Round to 2 decimal
s_base = alt.Chart(sanit_cor_data).encode(
x='variable2:O',
y='variable1:O'
)
# NOTE: rebinds the module-level name `text` used earlier by the water
# matrix; safe only because the water chart has already been rendered.
text = s_base.mark_text().encode(
text='correlation_label',
color=alt.condition(
alt.datum.correlation > 0.1,
alt.value('black'),
alt.value('white')
)
)
# The correlation heatmap
sanit_cor_plot = s_base.mark_rect().encode(
color=alt.Color('correlation:Q', scale=alt.Scale(scheme='plasma'))
).properties(
width=700,
height=500,
title = "The Correlation Matrix: Sanitation"
)
# Render heatmap layered with its value labels. Use st.altair_chart for
# consistency with the other Altair renders in this script (the water
# matrix and pie section both call st.altair_chart).
st.altair_chart(sanit_cor_plot + text)
st.caption("Correlation Matrix for Sanitation Feature Data")
# Typo fixed in user-facing text: "SYstem" -> "System".
st.write("The SDG is to ensure safe management of sanitary waste, hence the most important parameter is 'Safely Managed Sanitary System', which shows significantly high (91%) correlation with the 'Sewer Connection'. This indicates that as the Connections to Sewer Networks increase, the Safe Management of Sewer Waste increases.")
## CLASSIFICATION OF SANITATION SEWERAGE INFRASTRUCTURE/ METHODS
st.header("3.1. Classification of Sewerage Infrastructure/ Methods")
st.write("Although Open-defecation is extremely unhygenic, a large number of world-population rely on it. However, the situation is slowly changing. Most of the countries have underground and safe sewer-systems on their developement agenda. The scatter-plot in this section shows increase in population having Sewerage Connection, over 20 years. The different dot sizes depict population of a country. As we hover over the graph, the tooltip (cursor) shows name of the country of a particular data point. Single Selection which acts as a dynamic query filter, enables user to click on any point and disaply its details on-demand in the form of a pie chart, alongside.")
# Click-selection (year, country) linking the sewer-connection scatter to
# the disposal-method pie; mirrors the water section's `selection`.
s_selection = alt.selection_single(fields=['YEAR','COUNTRY'])
sewerconnectionchart = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x=alt.X('YEAR:O',axis=alt.Axis(title='Year')),
y=alt.Y('SEWERCONNECTION_SAN_NAT', axis=alt.Axis(title='% Population with Sewerage Connections')),
size='POP_THOUS',
#shape='SDG region',
color = alt.Color('COUNTRY', scale=alt.Scale(scheme='plasma')),
tooltip='COUNTRY'
).add_selection(s_selection).encode(
# Selected country keeps its color; the rest are greyed out.
color=alt.condition(s_selection, "COUNTRY", alt.value("grey"), legend=None, scale=alt.Scale(scheme='plasma')),
).properties(
title="Increase in Underground Sewerage Over Time",
width=400
)
# Mean share per disposal-method variable for the selected country/year.
s_nationpie = alt.Chart(sanitpie_melt).mark_arc().encode(
theta=alt.Theta(field='mean_value', type="quantitative"),
color=alt.Color('variable', scale=alt.Scale(scheme='plasma')),
tooltip=('variable:O', 'mean_value:Q')
).transform_filter(
s_selection
).transform_aggregate(
mean_value='mean(value):Q',
groupby=["variable"]
).properties(
title="Disposal Method of Sanitary Waste"
)
st.write(alt.hconcat(
sewerconnectionchart , s_nationpie
).resolve_scale(
color='independent'
).configure_view(
stroke=None
))
st.caption("Increase in Underground Sewerage (left) and Type of Disposal of Sanitary Waste (right) (Interactive)")
st.write("**Interactivity Guide:** Hover/ Click the 'Country' beads to see the pie change adaptively for the selected Country and Year. To deselect click on whitespace...")
# Typos fixed in user-facing text: "imited-shared" -> "Limited-shared",
# "Sanition" -> "Sanitation".
st.write("The pie chart shows classification of Sewerage Infrastructure in Basic, Limited-shared, Unimproved Sanitation and Open defecation. It gives over-all idea of the country's sewerage infrastructure and availability of safely managed sewerage systems. Most of the countries show significant improvement in 20 years.")
st.subheader("***🔑 Key Insight***")
st.write("*China's impressive development in connecting its cities to underground sewerage systems. Notice that India needs to make massive investments in improving its sewerage infrastructure. Notice that India reduces the percentage of open defecation from 74% in 2000 to 15% in 2020!*")
## PERFORMANCE OF COUNTRIES IN DELIVERING NONCONTAMINATED DRINKING WATER
st.header("3.2. Performance by Nations in Safe Collection and Disposal of Sanitary Wastewater from its Citizens")
st.write("SDGs aim to irradicate open defecation and provide safely managed sewerage infrastructure to the people. This section contains scatter plots showing Treated Waste-Water vs. Safely Managed Sanitary System. As their names suggest, these are interdependent and most of the countries show relative progress in these two parameters. The scatter-plot of Treated Waste-Water vs. Open-defecation shows that irradicating open-defecation is a slow yet continuously progresssing process. ")
# Year slider shared by the four sanitation performance charts below.
s_slider = alt.binding_range(min=2000, max=2020, step=1, name='Select year:')
s_select_year = alt.selection_single(name="YEAR", fields=['YEAR'],
bind=s_slider, init={'YEAR': 2000})
## WTSM WW Treated vs. Safely Managed
WTSF = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x = alt.X('SAFELYMANAGED_SAN_NAT'),
y = alt.Y('WW_TREATED_SAN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
title="Safely Managed and Treated Wastewater",
width=500,
height=250)
## WTOD WW Treated vs. Open Defecation
WTOD = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x = alt.X('OPENDEFECATION_SAN_NAT'),
y = alt.Y('WW_TREATED_SAN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
title="Wastewater Treatment vs. Open Defecation",
width=250,
height=250)
## SWC WW Treated vs. Sewer Connection
WTSC = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x = alt.X('SEWERCONNECTION_SAN_NAT'),
y = alt.Y('WW_TREATED_SAN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
title="Wastewater Treatment vs. Sewerage Connectivity",
width=250,
height=250)
# World population bar, same construction as `worldpop` in section 2.2.
s_worldpop = alt.Chart(sanitdf).mark_bar().encode(
x="YEAR:O",
y=alt.Y("sum(POP_THOUS):Q",scale=alt.Scale(domain=(0,8000000)),axis=alt.Axis(title='World Population (in 1000s)')),
color=alt.Color('YEAR:N', scale=alt.Scale(scheme='plasma', zero=False)),
tooltip = 'YEAR'
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
height=250)
# 2x2 layout mirroring the water-performance section.
st.write(alt.concat(
(s_worldpop | WTSF) & (WTOD | WTSC)
).resolve_scale(
color='shared'
).configure_view(
stroke=None
))
st.caption("Performance by Nations in Safe Collection and Disposal of Sanitary Wastewater(Interactive)")
st.write("**Interactivity Guide:** Move the slider to and fro to visualize.")
st.write("Waste-water can be treated only when it is connected to a sewer system, is collected and carried to a treatment plant. The third scatter plot in this section, Treated Waste-Water vs. Sewer Connections show almost direct relation for most of the countries. The different dot sizes depict population of a country. The Slider of Years, help dynamically compare the progress of different nations over the time. For most of the countries, the parameters in all the three graphs show clear relation.")
st.subheader("***🔑 Key Insight***")
st.write("*These charts help us identify the countries with poor development or the ones that need drastic positive changes. In the upper chart notice that on one hand India seems to struggle in treating wastewater but also shows drastic improvement in safely managing the waterwater. The lower two charts help us understand why! Observe the lower two charts carefully, India reduces open defecation but there is almost no increase in proportional treatment of wastewater. This is primarily because India conventionally has decentralized sanitation, meaning the absence of a centralized sanitary wastewater collection and treatment infrastructure. It ensures the reduction in open defecation essentially by having in-situ septic tanks which are not connected to a centralized underground wastewater network infrastructure.*")
st.markdown("***Data Source:** WHO-UNICEF JOINT MONITORING PROGRAM [Webpage](https://washdata.org/how-we-work/sdg-monitoring).*")
# Extraction artifact repaired: the original line fused this statement,
# dataset-cell separators ("| streamlit_app.py |"), and the next file's
# `import pandas as pd` onto a single (syntactically invalid) line.
st.markdown("This project was created by [<NAME>](https://www.linkedin.com/in/tanaykulkarni/) and [<NAME>](https://www.linkedin.com/in/devashrikarve/) for the [Interactive Data Science](https://dig.cmu.edu/ids2022) course at [Carnegie Mellon University](https://www.cmu.edu).")
import pandas as pd
pd.set_option('display.max_columns', None)
import altair as alt
import streamlit as st
# Lift Altair's default 5000-row limit; the JMP datasets exceed it.
alt.data_transformers.disable_max_rows()
#IMPORTING THE DATA
# WHO-UNICEF JMP national water / sanitation indicators, loaded from GitHub.
waterdf = pd.read_csv("https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/master/water.csv", on_bad_lines='skip', encoding = "ISO-8859-1")
sanitdf = pd.read_csv("https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/master/sanitation.csv", on_bad_lines='skip', encoding = "ISO-8859-1")
#INSPECTING THE DATA AND CLEANING OF DATA
#DATA NO.1 : WATER
print(waterdf.shape)
print(waterdf.describe())
print(waterdf.isna().sum())
# Rows without a population figure are unusable for the size encodings.
waterdf = waterdf.dropna(subset=['POP_THOUS'])
print(waterdf.shape)
# POP_THOUS arrives as strings with space thousands-separators
# (e.g. "1 234"); strip the spaces before the integer conversion.
waterdf['POP_THOUS'] = waterdf['POP_THOUS'].str.replace(' ', '')
waterdf['POP_THOUS'] = waterdf['POP_THOUS'].astype(int)
# Keep both a string and a float year: YEAR_STR for ordinal axes,
# YEAR for the slider selections.
waterdf['YEAR_STR'] = waterdf['YEAR'].astype(str)
waterdf['YEAR'] = waterdf['YEAR'].astype(float)
print(waterdf.head())
print(waterdf.describe())
# Long-form melt of the four access-type columns for the pie chart.
waterpie = waterdf[['COUNTRY','YEAR','YEAR_STR','BASIC_WAT_NAT','LIMITED_WAT_NAT','UNIMPROVED_WAT_NAT','SURFACE_WAT_NAT']]
waterpie_melt = pd.melt(waterpie, id_vars=['COUNTRY','YEAR','YEAR_STR'], value_vars=['BASIC_WAT_NAT','LIMITED_WAT_NAT','UNIMPROVED_WAT_NAT','SURFACE_WAT_NAT'])
#DATA NO.2 : SANITATION
print(sanitdf.shape)
print(sanitdf.describe())
print(sanitdf.isnull().sum())
sanitdf = sanitdf.dropna(subset=['POP_THOUS'])
print(sanitdf.shape)
sanitdf['POP_THOUS'] = sanitdf['POP_THOUS'].str.replace(' ', '')
sanitdf['POP_THOUS'] = sanitdf['POP_THOUS'].astype(int)
sanitdf['YEAR'] = sanitdf['YEAR'].astype(float)
# Long-form melt of the four disposal-method columns for the pie chart.
sanitpie = sanitdf[['COUNTRY','YEAR','BASIC_SAN_NAT','LIMITED_SHARED_SAN_NAT','UNIMPROVED_SAN_NAT','OPENDEFECATION_SAN_NAT']]
sanitpie_melt = pd.melt(sanitpie, id_vars=['COUNTRY','YEAR'], value_vars=['BASIC_SAN_NAT','LIMITED_SHARED_SAN_NAT','UNIMPROVED_SAN_NAT','OPENDEFECATION_SAN_NAT'])
##TITLE AND INTRO
st.title("UN SDG 6: Clean Water and Sanitation")
st.subheader("An Exploratory Visualization Application to Find Key Insights")
st.image("https://blantyre.dorium.community/uploads/default/optimized/1X/6fc93ea6f54ff0312e52bf977c07f91e35efdf40_2_1035x322.jpeg")
st.write("United Nations has gloabally designed several Sustainable Developement Goals(SDGs) as actions to end poverty, protect the planet and ensure peace and prosperity for human beings. SDGs are the extensions of Millenium Developement Goals(MDGs), which were started in the year 2000 to serve the same purpose. SDG-6 is to ensure availability and safe and sustainable management of water and sanitation for all. This project analyzes overall developement of countries around the world, towards safely managing drinking water and sanitation.")
##WORLD POPULATION SLIDER
st.header("1. Growth in World Population over Time")
st.image('https://unstats.un.org/sdgs/assets/img/sliders/2017-Regions-E-large.png')
st.write("The United Nations categorized the world nations in Eight Major Regions, viz.,",
"'Sub-Saharan Africa', 'Northern & Western Africa', 'Central & Southern Asia', 'Eastern & South-Eastern Asia'",
", 'Latin America & the Caribbean', 'Australia & New-Zealand','Oceania', and 'Europe & Northern America'.")
# Year slider shared by the two population bar charts.
slider1 = alt.binding_range(min=2000, max=2020, step=1, name='Select year:')
select_year1 = alt.selection_single(name="YEAR", fields=['YEAR'],
bind=slider1, init={'YEAR': 2000})
# Population by SDG region, stacked by country, for the selected year.
popsdgchart = alt.Chart(waterdf).mark_bar(tooltip=True).encode(
y = alt.Y('POP_THOUS',
axis=alt.Axis(title='Population (in 1000s)'), sort='-x',
scale=alt.Scale(domain=(0, 2400000))),
x = alt.X('SDG region:O',
axis=alt.Axis(title='SDG Regions'),
scale=alt.Scale(zero=False), sort='y'
),
color= alt.Color('COUNTRY:O', legend = None, scale=alt.Scale(scheme='plasma'))
).properties(
width = 300,
height = 300,
title="Population (2000-2020): SDG Regions"
).transform_filter(
select_year1
).add_selection(
select_year1
)
# Population by country for the selected year; small countries
# (POP_THOUS <= 40000) are filtered out to keep the axis readable.
popyearchart = alt.Chart(waterdf).mark_bar(tooltip=True).encode(
y = alt.Y('POP_THOUS',
axis=alt.Axis(title='Population (in 1000s)'), sort='-x',
scale=alt.Scale(domain=(0, 1600000))),
x = alt.X('COUNTRY:O',
axis=alt.Axis(title='Countries'),
scale=alt.Scale(zero=False), sort='-y'
),
color= alt.Color('COUNTRY', legend = None, scale=alt.Scale(scheme='plasma'))
).transform_filter(
select_year1
).add_selection(
select_year1
).transform_filter(
alt.datum.POP_THOUS > 40000
).properties(
width = 400,
height = 300,
title="Population (2000-2020): World Nations"
)
popgrowth= alt.concat(
popsdgchart, popyearchart
).resolve_scale(
color='independent'
).configure_view(
stroke=None
)
st.altair_chart(popgrowth, use_container_width=True)
st.caption("Growth in World's Population over Time (2000-2020) (Interactive)")
st.write("**Interactivity Guide:** Move the slider, hover on the bars to view more details...")
st.write("The world population grew exponentially from around 6 Billion in 2000 to about 8 Billion by 2020! This steep rise in population put great stress on the world economies to ensuring clean potable drinking water and safe sanitation to each and every human being on the planet. Population is an important and consistently growing parameter on which, developement of any nation largely depends. This section shows a pair of histograms depicting population growth in different countries and different SDG Regions in the the world between from the year 2000 to 2020. ")
st.subheader("***🔑 Key Insight***")
st.write("*Notice the steep 30% increase in India's population. Compare it with China's and USA's population over the past 20 years!*")
## PART A - CLEAN WATER
st.header("2. Drinking Water")
## THE WATER CORRELATION MATRIX
st.write("The data obtained has 10 different parameters [Link to Variable Dictionary](https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/f367084a4fef6684455252465e3bd7f6e9ae9a67/Dictionary%20-%20water.csv). To visualize the correlation (connection) between these parameters, a correlation matrix is plotted. Many parameters show strong correlation among themselves.")
# THE MATRIX
# Long-form correlation table for the 10 water-access indicators:
# .corr() builds the matrix, .stack() melts it to one row per pair.
cor_data = (waterdf[['BASIC_WAT_NAT', 'LIMITED_WAT_NAT', 'UNIMPROVED_WAT_NAT', 'SURFACE_WAT_NAT', 'SAFELY_MANAGED_NAT', 'ACCESS_ONPREMISE_NAT', 'AVAIL_WHEN_NEEDED_NAT',
'NON_CONTAMIN_NAT', 'PIPED_NAT', 'NONPIPED_NAT']]
).corr().stack().reset_index().rename(columns={0: 'correlation', 'level_0': 'variable1', 'level_1': 'variable2'})
cor_data['correlation_label'] = cor_data['correlation'].map('{:.2f}'.format) # Round to 2 decimal
# Shared x/y encoding for both the label layer and the heatmap layer.
base = alt.Chart(cor_data).encode(
x='variable2:O',
y='variable1:O'
)
# Numeric labels; white text on dark (low-correlation) cells.
text = base.mark_text().encode(
text='correlation_label',
color=alt.condition(
alt.datum.correlation > 0.1,
alt.value('black'),
alt.value('white')
)
)
## THE HEATMAP
cor_plot = base.mark_rect().encode(
color=alt.Color('correlation:Q', scale=alt.Scale(scheme='plasma'))
).properties(
width=700,
height=500,
title="The Correlation Matrix: Drinking Water"
)
st.altair_chart((cor_plot + text))
st.caption("Correlation Matrix for Water Feature Data")
st.write("The SDG is to ensure clean drinking water, hence the most important parameter is 'Non Contaminated Water', which shows significantly high (80%) correlation with the 'Piped Water'. This indicates that as the piped water networks increase, the delivery of non-contaminated water increases.")
## CLASSIFICATION OF DRINKING WATER INFRASTRUCTURE/ METHODS
st.header("2.1. Classification of Drinking Water Infrastructure/ Methods")
st.write("From ancient ground/surface water withdrawl to modern pipe networks, methods of access to drinking water are developing continuously. The scatter plot in this section shows increase in population of different countries having access to safe/purified piped water through 20 years. The different dot sizes depict population of a country. ")
# Click-selection on (year, country) linking the scatter to the pie.
selection = alt.selection_single(fields=['YEAR_STR','COUNTRY'])
pipedwaterchart = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x=alt.X('YEAR_STR:O', axis=alt.Axis(title='Year')),
y=alt.Y('PIPED_NAT', axis=alt.Axis(title='% Population with Piped Water Connections')),
size='POP_THOUS',
#shape='SDG region',
color = alt.Color('COUNTRY', scale=alt.Scale(scheme='plasma')),
tooltip='COUNTRY'
).add_selection(selection).encode(
# Selected country keeps its hue; everything else is greyed out.
color=alt.condition(selection, "COUNTRY", alt.value("grey"), legend=None, scale=alt.Scale(scheme='plasma'))
).properties(
title="Increase in Access to Piped Water Connections over Time",
width=400
)
# Mean access-type shares for the selected country/year.
nationpie = alt.Chart(waterpie_melt).mark_arc().encode(
theta=alt.Theta(field='mean_value', type="quantitative"),
color=alt.Color('variable', scale=alt.Scale(scheme='plasma')),
tooltip=('variable:O', 'mean_value:Q')
).transform_filter(
selection
).transform_aggregate(
mean_value='mean(value):Q',
groupby=["variable"]
).properties(
title="Access to Drinking Water"
)
chart_pie = alt.hconcat(
pipedwaterchart , nationpie
).resolve_scale(
color='independent'
).configure_view(
stroke=None
)
st.altair_chart(chart_pie)
st.caption("Increase in Access to Piped Drinking Water (left) and Type of Access to Drinking Water (right) (Interactive)")
st.write("**Interactivity Guide:** Hover/ Click the 'Country' beads to see the pie change adaptively for the selected Country and Year. To deselect click on whitespace...")
st.write("As we hover over the graph, the tooltip (cursor) shows name of the country of a particular data point. Single Selection which acts as a dynamic query filter, enables user to click on any point and disaply its details on-demand in the form of a pie chart, alongside. The pie chart shows the accessability to Basic, Limited, Unimproved or Surface water in each country. This gives overall idea of the country's water infrastructure.")
st.subheader("***🔑 Key Insight***")
st.write("*Notice how China enhances delivery of drinking water to 80% of its people with Piped Water Connections in 2020 from a 50% in 2000. India clearly needs to improve its delivery through piped water connectivity. This is a clear indication why the Indian Government started heavily investing in schemes like 'Jal Jeevan Mission' (https://jaljeevanmission.gov.in/) that envisions to provide safe and adequate drinking water through individual household tap connections by 2024 to all households in rural India.*")
## PERFORMANCE OF COUNTRIES IN DELIVERING NONCONTAMINATED DRINKING WATER
st.header("2.2. Performance by Nations in Delivering Non-contaminated, Safe Drinking Water to its Citizens")
st.write("As the goal of the SDG is to provide clean/safe drinking water to all, the scatter plots are created to show World Population vs. Safely Managed Water, Non-Contaminated vs. Non-Piped Water, and Non-Contaminated vs. Piped Water. The different dot sizes depict population of a country. The Slider of Years, help dynamically compare the progress of different nations over the time. ")
# Year-slider selection shared by the four charts below.
slider2 = alt.binding_range(min=2000, max=2020, step=1, name='Select year:')
select_year2 = alt.selection_single(name="YEAR", fields=['YEAR'],
bind=slider2, init={'YEAR': 2000})
## NSCM - Non Contaminated VS Safely Managed
NCSM = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x = alt.X('SAFELY_MANAGED_NAT'),
y = alt.Y('NON_CONTAMIN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
title="Safely Managed Non Contaminated Drinking Water",
width = 500,
height = 250
)
## NCNP Non Contaminated VS NON Piped
NCNP = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x = alt.X('NONPIPED_NAT'),
y = alt.Y('NON_CONTAMIN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
title="Non Piped Access to Non Contaminated Drinking Water",
width = 250,
height = 250,
)
## NCAWN Non Contaminated VS Availability When Needed
NCP = alt.Chart(waterdf).mark_circle(opacity=0.9).encode(
x = alt.X('AVAIL_WHEN_NEEDED_NAT',),
y = alt.Y('NON_CONTAMIN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
title="Availability of Non Contaminated Drinking Water When Needed",
width = 250,
height = 250,
)
# Total world population bar for the selected year (units: thousands).
worldpop = alt.Chart(waterdf).mark_bar().encode(
x="YEAR:O",
y=alt.Y("sum(POP_THOUS):Q",scale=alt.Scale(domain=(0,8000000)),axis=alt.Axis(title='World Population (in 1000s)')),
color=alt.Color('YEAR:N', scale=alt.Scale(scheme='plasma', zero=False)),
tooltip = 'YEAR'
).transform_filter(
select_year2
).add_selection(
select_year2
).properties(
height= 250)
# 2x2 layout: (worldpop | NCSM) on top, (NCNP | NCP) below.
st.write(alt.concat(
(worldpop | NCSM)& (NCNP | NCP)
).resolve_scale(
color='shared'
).configure_view(
stroke=None
)
)
st.caption("Performance by Nations in Delivering Safely Managed Drinking Water to its Citizens(Interactive)")
st.write("**Interactivity Guide:** Move the slider to and fro to visualize. Hover on the circles to identify the country.")
st.write("For most of the countries, the parameters in all the three graphs show clear relation. Non-Contaminated water increases as the Safe management of water increases. Non-piped water increases/decreases, Non-contaminated water decreases/increases. Non-contaminated water increases as Pipe water increases.")
st.subheader("***🔑 Key Insight***")
st.write("*While most of the countries in the World are improving their water infrastructure systems, these charts help us identify the countries with poor development or the ones that need drastic positive changes. Notice Pakistan (near (x=40,y=40)) moving in opposite direction as compared to the rest of world indicating it has failed to provide any improvement in delivering non-contaminated safely managed clean drinking water to its citizens. The lower left chart shows Pakistan, Nigeria, and Ethiopia witnessed increase in proportion of its people having non-piped access to fairly contaminated drinking water. The lower right chart shows that Ethiopia and Nigeria ensured improvement in availability of water when its needed to its citizens but the quality of water fairly contaminated, whereas Pakistan couldn't ensure any development in both the parameters.*")
#########################################################################################################################
## PART B - SANITATION
st.header("3. Sanitation")
## THE SANITATION CORRELATION MATRIX
st.write("Sanitatary waste-water systems have been a tremendously neglected infrastructure, especially in the developing and under-developed countries. The data obtained has 11 different parameters [Link to Variable Dictionary](https://raw.githubusercontent.com/CMU-IDS-2022/assignment-2-dtk2/f367084a4fef6684455252465e3bd7f6e9ae9a67/Dictionary%20-%20sanitary.csv). To visualize the correlation (connection) between these parameters, a correlation matrix is plotted. A couple of parameters show strong correlation among themselves.")
# Same long-form corr-matrix construction as the water section, applied
# to the 11 sanitation indicators.
sanit_cor_data = (sanitdf[['BASIC_SAN_NAT', 'LIMITED_SHARED_SAN_NAT', 'UNIMPROVED_SAN_NAT', 'OPENDEFECATION_SAN_NAT', 'SAFELYMANAGED_SAN_NAT', 'DISPOSED_INSITU_SAN_NAT', 'EMP_TREAT_SAN_NAT',
'WW_TREATED_SAN_NAT', 'LATRINES_SAN_NAT', 'SEPTICTANKS_SAN_NAT', 'SEWERCONNECTION_SAN_NAT']]
).corr().stack().reset_index().rename(columns={0: 'correlation', 'level_0': 'variable1', 'level_1': 'variable2'})
sanit_cor_data['correlation_label'] = sanit_cor_data['correlation'].map('{:.2f}'.format) # Round to 2 decimal
s_base = alt.Chart(sanit_cor_data).encode(
x='variable2:O',
y='variable1:O'
)
# NOTE: rebinds the module-level name `text` used by the water matrix;
# safe only because the water chart has already been rendered.
text = s_base.mark_text().encode(
text='correlation_label',
color=alt.condition(
alt.datum.correlation > 0.1,
alt.value('black'),
alt.value('white')
)
)
# The correlation heatmap
sanit_cor_plot = s_base.mark_rect().encode(
color=alt.Color('correlation:Q', scale=alt.Scale(scheme='plasma'))
).properties(
width=700,
height=500,
title = "The Correlation Matrix: Sanitation"
)
# Render heatmap layered with its value labels. Use st.altair_chart for
# consistency with the other Altair renders in this script.
st.altair_chart(sanit_cor_plot + text)
st.caption("Correlation Matrix for Sanitation Feature Data")
# Typo fixed in user-facing text: "SYstem" -> "System".
st.write("The SDG is to ensure safe management of sanitary waste, hence the most important parameter is 'Safely Managed Sanitary System', which shows significantly high (91%) correlation with the 'Sewer Connection'. This indicates that as the Connections to Sewer Networks increase, the Safe Management of Sewer Waste increases.")
## CLASSIFICATION OF SANITATION SEWERAGE INFRASTRUCTURE/ METHODS
st.header("3.1. Classification of Sewerage Infrastructure/ Methods")
st.write("Although Open-defecation is extremely unhygenic, a large number of world-population rely on it. However, the situation is slowly changing. Most of the countries have underground and safe sewer-systems on their developement agenda. The scatter-plot in this section shows increase in population having Sewerage Connection, over 20 years. The different dot sizes depict population of a country. As we hover over the graph, the tooltip (cursor) shows name of the country of a particular data point. Single Selection which acts as a dynamic query filter, enables user to click on any point and disaply its details on-demand in the form of a pie chart, alongside.")
# Click-selection (year, country) linking the scatter to the pie.
s_selection = alt.selection_single(fields=['YEAR','COUNTRY'])
sewerconnectionchart = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x=alt.X('YEAR:O',axis=alt.Axis(title='Year')),
y=alt.Y('SEWERCONNECTION_SAN_NAT', axis=alt.Axis(title='% Population with Sewerage Connections')),
size='POP_THOUS',
#shape='SDG region',
color = alt.Color('COUNTRY', scale=alt.Scale(scheme='plasma')),
tooltip='COUNTRY'
).add_selection(s_selection).encode(
# Selected country keeps its color; the rest are greyed out.
color=alt.condition(s_selection, "COUNTRY", alt.value("grey"), legend=None, scale=alt.Scale(scheme='plasma')),
).properties(
title="Increase in Underground Sewerage Over Time",
width=400
)
# Mean share per disposal-method variable for the selected country/year.
s_nationpie = alt.Chart(sanitpie_melt).mark_arc().encode(
theta=alt.Theta(field='mean_value', type="quantitative"),
color=alt.Color('variable', scale=alt.Scale(scheme='plasma')),
tooltip=('variable:O', 'mean_value:Q')
).transform_filter(
s_selection
).transform_aggregate(
mean_value='mean(value):Q',
groupby=["variable"]
).properties(
title="Disposal Method of Sanitary Waste"
)
st.write(alt.hconcat(
sewerconnectionchart , s_nationpie
).resolve_scale(
color='independent'
).configure_view(
stroke=None
))
st.caption("Increase in Underground Sewerage (left) and Type of Disposal of Sanitary Waste (right) (Interactive)")
st.write("**Interactivity Guide:** Hover/ Click the 'Country' beads to see the pie change adaptively for the selected Country and Year. To deselect click on whitespace...")
st.write("The pie chart shows classification of Sewerage Infrastructure in Basic, imited-shared, Unimproved Sanition and Open defecation. It gives over-all idea of the country's sewerage infrastructure and availability of safely managed sewerage systems. Most of the countries show significant improvement in 20 years.")
st.subheader("***🔑 Key Insight***")
st.write("*China's impressive development in connecting its cities to underground sewerage systems. Notice that India needs to make massive investments in improving its sewerage infrastructure. Notice that India reduces the percentage of open defecation from 74% in 2000 to 15% in 2020!*")
## PERFORMANCE OF COUNTRIES IN DELIVERING NONCONTAMINATED DRINKING WATER
st.header("3.2. Performance by Nations in Safe Collection and Disposal of Sanitary Wastewater from its Citizens")
st.write("SDGs aim to irradicate open defecation and provide safely managed sewerage infrastructure to the people. This section contains scatter plots showing Treated Waste-Water vs. Safely Managed Sanitary System. As their names suggest, these are interdependent and most of the countries show relative progress in these two parameters. The scatter-plot of Treated Waste-Water vs. Open-defecation shows that irradicating open-defecation is a slow yet continuously progresssing process. ")
s_slider = alt.binding_range(min=2000, max=2020, step=1, name='Select year:')
s_select_year = alt.selection_single(name="YEAR", fields=['YEAR'],
bind=s_slider, init={'YEAR': 2000})
## WTSM WW Treated vs. Safely Managed
WTSF = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x = alt.X('SAFELYMANAGED_SAN_NAT'),
y = alt.Y('WW_TREATED_SAN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
title="Safely Managed and Treated Wastewater",
width=500,
height=250)
## WTOD WW Treated vs. Open Defecation
WTOD = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x = alt.X('OPENDEFECATION_SAN_NAT'),
y = alt.Y('WW_TREATED_SAN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
title="Wastewater Treatment vs. Open Defecation",
width=250,
height=250)
## SWC WW Treated vs. Sewer Connection
WTSC = alt.Chart(sanitdf).mark_circle(opacity=0.9).encode(
x = alt.X('SEWERCONNECTION_SAN_NAT'),
y = alt.Y('WW_TREATED_SAN_NAT'),
color=alt.Color('SDG region:O',scale=alt.Scale(scheme='plasma')),
size='POP_THOUS:Q',
tooltip=('COUNTRY', 'SDG region')
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
title="Wastewater Treatment vs. Sewerage Connectivity",
width=250,
height=250)
s_worldpop = alt.Chart(sanitdf).mark_bar().encode(
x="YEAR:O",
y=alt.Y("sum(POP_THOUS):Q",scale=alt.Scale(domain=(0,8000000)),axis=alt.Axis(title='World Population (in 1000s)')),
color=alt.Color('YEAR:N', scale=alt.Scale(scheme='plasma', zero=False)),
tooltip = 'YEAR'
).transform_filter(
s_select_year
).add_selection(
s_select_year
).properties(
height=250)
st.write(alt.concat(
(s_worldpop | WTSF) & (WTOD | WTSC)
).resolve_scale(
color='shared'
).configure_view(
stroke=None
))
st.caption("Performance by Nations in Safe Collection and Disposal of Sanitary Wastewater(Interactive)")
st.write("**Interactivity Guide:** Move the slider to and fro to visualize.")
st.write("Waste-water can be treated only when it is connected to a sewer system, is collected and carried to a treatment plant. The third scatter plot in this section, Treated Waste-Water vs. Sewer Connections show almost direct relation for most of the countries. The different dot sizes depict population of a country. The Slider of Years, help dynamically compare the progress of different nations over the time. For most of the countries, the parameters in all the three graphs show clear relation.")
st.subheader("***🔑 Key Insight***")
st.write("*These charts help us identify the countries with poor development or the ones that need drastic positive changes. In the upper chart notice that on one hand India seems to struggle in treating wastewater but also shows drastic improvement in safely managing the waterwater. The lower two charts help us understand why! Observe the lower two charts carefully, India reduces open defecation but there is almost no increase in proportional treatment of wastewater. This is primarily because India conventionally has decentralized sanitation, meaning the absence of a centralized sanitary wastewater collection and treatment infrastructure. It ensures the reduction in open defecation essentially by having in-situ septic tanks which are not connected to a centralized underground wastewater network infrastructure.*")
st.markdown("***Data Source:** WHO-UNICEF JOINT MONITORING PROGRAM [Webpage](https://washdata.org/how-we-work/sdg-monitoring).*")
st.markdown("This project was created by [<NAME>](https://www.linkedin.com/in/tanaykulkarni/) and [<NAME>](https://www.linkedin.com/in/devashrikarve/) for the [Interactive Data Science](https://dig.cmu.edu/ids2022) course at [Carnegie Mellon University](https://www.cmu.edu).") | 0.37319 | 0.331985 |
from abc import ABC, abstractmethod
from typing import Tuple
import tensorflow as tf
from spatial_transform.spatail_grid import FlatGrid
from spatial_transform.interpolation import SpatialInterpolator
from spatial_transform.layers import TensorToTensorLayer, IdentityLayer
from spatial_transform.localization import LocalizationLayer
from spatial_transform.spatial_transforms import SpatialTransformType
class SpatialTransformBlock(TensorToTensorLayer, ABC):
    """Abstract base for spatial-transform blocks.

    Concrete subclasses map an image tensor to an image tensor resampled
    onto an output grid of shape ``shape_out``.
    """

    def __init__(self, shape_out: Tuple[int, int], **kwargs):
        """
        :param shape_out: output image shape (height, width)
        :param kwargs: forwarded to the base layer
        """
        super().__init__(**kwargs)
        self._shape_out = shape_out

    @abstractmethod
    def call(self, inputs: tf.Tensor, training: bool) -> tf.Tensor:
        """
        :param inputs: tf.Tensor, shape = [batch, height, width, channels], dtype = tf.float32
        :param training: bool
        :return: tf.Tensor, shape = [batch, :param shape_out[0], :param shape_out[1], channels], dtype = tf.float32
        """
        raise NotImplementedError()

    def get_config(self):
        # Extend the serialized base-layer config with this block's argument.
        serialized = dict(super().get_config())
        serialized['shape_out'] = self._shape_out
        return serialized
class CustomSpatialTransformBlock(SpatialTransformBlock):
    """
    STN-CX block implementation

        +->- [ conv layers ] ->- [ localization_layer ] ->-+
        |                                                  |
    ->-+--------------------->---------------- [ interpolation_layer ] ->-

    The conv layers extract features from the input image, the localization
    layer predicts transformation parameters from those features, and the
    interpolator resamples the *original* input image on the warped grid.
    """

    def __init__(
        self,
        localization_layer: LocalizationLayer[SpatialTransformType],
        spatial_transform: SpatialTransformType,
        interpolator: SpatialInterpolator,
        conv_layers: TensorToTensorLayer,
        shape_out: Tuple[int, int],
        **kwargs
    ):
        """
        :param localization_layer: localisation layer parameterized with :param spatial_transform
        :param spatial_transform: spatial transform type
        :param interpolator: interpolation type
        :param conv_layers: feature-extraction layer feeding :param localization_layer
        :param shape_out: output image shape (height, width)
        """
        super().__init__(shape_out=shape_out, **kwargs)
        self._localization_layer = localization_layer
        self._spatial_transform = spatial_transform
        self._interpolator = interpolator
        self._conv_layers = conv_layers
        # NOTE(review): get_config() (inherited) serializes only shape_out;
        # the layer/transform arguments are not serialized, so this block
        # cannot be fully reconstructed from its config — confirm intent.

    def call(self, inputs: tf.Tensor, training: bool) -> tf.Tensor:
        """
        Resample ``inputs`` on a grid predicted by the localization network.

        :param inputs: tf.Tensor, shape = [batch, height, width, channels], dtype = tf.float32
        :param training: bool, forwarded to the sub-layers
        :return: tf.Tensor, shape = [batch, shape_out[0], shape_out[1], channels], dtype = tf.float32
        """
        batch_size = tf.shape(inputs)[0]
        # Predict transformation parameters from convolutional features.
        # (Accessors used consistently; the original mixed properties and
        # private attributes.)
        features = self.conv_layers(inputs=inputs, training=training)
        transformation_params = self.localization_layer(inputs=features, training=training)
        # Build the output sampling grid and warp it by the predicted params.
        grid = FlatGrid(shape_out=self._shape_out, batch_size=batch_size)
        transformed_grid = \
            self.spatial_transform.transform_grid(transformation_params=transformation_params, grid=grid)
        # Sample the original image (not the conv features) at the warped grid.
        output = self.interpolator.interpolate(image=inputs, grid=transformed_grid)
        return output

    @property
    def localization_layer(self) -> LocalizationLayer:
        return self._localization_layer

    @property
    def spatial_transform(self) -> SpatialTransformType:
        # Added for consistency with the other constructor-argument accessors.
        return self._spatial_transform

    @property
    def interpolator(self) -> SpatialInterpolator:
        return self._interpolator

    @property
    def conv_layers(self) -> TensorToTensorLayer:
        # Fixed: the original annotated this property with a tuple
        # `(tf.keras.layers.Layer, TensorToTensorLayer)`, which is not a
        # valid type annotation; the attribute is a TensorToTensorLayer.
        return self._conv_layers
class SimpleSpatialTransformBlock(CustomSpatialTransformBlock):
    """
    STN-C0 block implementation

        +->-[ localization_layer ]->-+
        |                            |
    ->-+------->---------[ interpolation_layer ]->-

    A :class:`CustomSpatialTransformBlock` whose feature extractor is the
    identity: the localization layer reads the raw input image directly.
    """

    def __init__(
        self,
        localization_layer: LocalizationLayer[SpatialTransformType],
        spatial_transform: SpatialTransformType,
        interpolator: SpatialInterpolator,
        shape_out: Tuple[int, int],
        **kwargs
    ):
        """
        :param localization_layer: localisation layer parameterized with :param spatial_transform
        :param spatial_transform: spatial transform type
        :param interpolator: interpolation type
        :param shape_out: output image shape (height, width)
        """
        super().__init__(
            localization_layer=localization_layer,
            spatial_transform=spatial_transform,
            interpolator=interpolator,
            conv_layers=IdentityLayer(),
            shape_out=shape_out,
            **kwargs,
        )
from typing import Tuple
import tensorflow as tf
from spatial_transform.spatail_grid import FlatGrid
from spatial_transform.interpolation import SpatialInterpolator
from spatial_transform.layers import TensorToTensorLayer, IdentityLayer
from spatial_transform.localization import LocalizationLayer
from spatial_transform.spatial_transforms import SpatialTransformType
class SpatialTransformBlock(TensorToTensorLayer, ABC):
"""
Interface for Spatial Transform Block
"""
def __init__(
self,
shape_out: Tuple[int, int],
**kwargs
):
"""
:param shape_out: output image shape
:param kwargs:
"""
super().__init__(**kwargs)
self._shape_out = shape_out
@abstractmethod
def call(self, inputs: tf.Tensor, training: bool) -> tf.Tensor:
"""
:param inputs: tf.Tensor, shape = [batch, height, width, channels], dtype = tf.float32
:param training: bool
:return: tf.Tensor, shape = [batch, :param shape_out[0], :param shape_out[1], channels], dtype = tf.float32
"""
raise NotImplementedError()
def get_config(self):
config = super().get_config().copy()
config.update({
'shape_out': self._shape_out,
})
return config
class CustomSpatialTransformBlock(SpatialTransformBlock):
"""
STN-CX block implementation
+->- [ conv layers ] ->- [ localization_layer ] ->-+
| |
->-+--------------------->---------------- [ interpolation_layer ] ->-
"""
def __init__(
self,
localization_layer: LocalizationLayer[SpatialTransformType],
spatial_transform: SpatialTransformType,
interpolator: SpatialInterpolator,
conv_layers: TensorToTensorLayer,
shape_out: Tuple[int, int],
**kwargs
):
"""
:param localization_layer: Localisation layer parameterized with :param spatial_transform
:param spatial_transform: Spatial transform type
:param interpolator: Interpolation type
:param conv_layers: layer followed by :param localization layer
:param shape_out: output image shape
"""
super().__init__(shape_out=shape_out, **kwargs)
self._localization_layer = localization_layer
self._spatial_transform = spatial_transform
self._interpolator = interpolator
self._conv_layers = conv_layers
def call(self, inputs: tf.Tensor, training: bool) -> tf.Tensor:
"""
:param inputs: tf.Tensor, shape = [batch, height, width, channels], dtype = tf.float32
:param training: bool
:return: tf.Tensor, shape = [batch, height, width, channels], dtype = tf.float32
"""
batch_size = tf.shape(inputs)[0]
features = self._conv_layers(inputs=inputs, training=training)
transformation_params = self.localization_layer(inputs=features, training=training)
grid = FlatGrid(shape_out=self._shape_out, batch_size=batch_size)
transformed_grid = \
self._spatial_transform.transform_grid(transformation_params=transformation_params, grid=grid)
output = self.interpolator.interpolate(image=inputs, grid=transformed_grid)
return output
@property
def localization_layer(self) -> LocalizationLayer:
return self._localization_layer
@property
def interpolator(self) -> SpatialInterpolator:
return self._interpolator
@property
def conv_layers(self) -> (tf.keras.layers.Layer, TensorToTensorLayer):
return self._conv_layers
class SimpleSpatialTransformBlock(CustomSpatialTransformBlock):
"""
STN-C0 block implementation
+->-[ localization_layer ]->-+
| |
->-+------->---------[ interpolation_layer ]->-
"""
def __init__(
self,
localization_layer: LocalizationLayer[SpatialTransformType],
spatial_transform: SpatialTransformType,
interpolator: SpatialInterpolator,
shape_out: Tuple[int, int],
**kwargs
):
super().__init__(
localization_layer = localization_layer,
spatial_transform = spatial_transform,
interpolator = interpolator,
conv_layers = IdentityLayer(),
shape_out = shape_out,
**kwargs
) | 0.95275 | 0.64639 |
import unittest
from unittest.mock import patch, mock_open
from jsonschema import exceptions
from .. import (
config
)
class TestValidateConfig(unittest.TestCase):
    """Schema-validation tests for dataset / imageset / collection configs.

    Each test feeds a YAML snippet through ``config.load_config`` (with the
    file read mocked out) and checks that ``config.validate_config`` either
    accepts it or raises ``jsonschema.exceptions.ValidationError``.

    The repeated try/except and assertRaises boilerplate of the original is
    factored into ``_assert_valid`` / ``_assert_invalid``; every public test
    name is unchanged.

    NOTE(review): the indentation inside the YAML snippets was reconstructed
    (keys under ``*_config:`` nested one level) — confirm against the schema.
    """

    def _get_configuration(self, data):
        # Parse the YAML snippet via load_config without touching the
        # filesystem; the path argument is irrelevant because open() is
        # mocked to return `data`.
        with patch('builtins.open', mock_open(read_data=data)):
            return config.load_config('foo')

    def _assert_valid(self, config_data):
        # Success-path helper: validation must not raise.
        configuration = self._get_configuration(config_data)
        try:
            config.validate_config(configuration)
        except exceptions.ValidationError:
            self.fail('Failed validation')

    def _assert_invalid(self, config_data):
        # Failure-path helper: validation must raise ValidationError.
        configuration = self._get_configuration(config_data)
        with self.assertRaises(exceptions.ValidationError):
            config.validate_config(configuration)

    # --- file dataset configs -------------------------------------------

    def test_file_upload_path(self):
        self._assert_valid("""
            dataset_type: file
            file_config:
                path: test
            """)

    def test_file_upload_directory(self):
        self._assert_valid("""
            dataset_type: file
            file_config:
                directory: test
            """)

    def test_unknown_configuration(self):
        self._assert_invalid("""
            foo: bar
            """)

    def test_unknown_dataset_type(self):
        self._assert_invalid("""
            dataset_type: foo
            file_config:
                path: test
            """)

    def test_file_dataset_multiple_valid(self):
        # NOTE(review): dataset_type 'foo' already fails the type enum, so
        # this may not actually exercise the path+directory rule — confirm
        # whether 'file' was intended here.
        self._assert_invalid("""
            dataset_type: foo
            file_config:
                path: test
                directory: test
            """)

    def test_file_dataset_multiple_invalid(self):
        # NOTE(review): same concern as test_file_dataset_multiple_valid —
        # 'foo' fails the enum before the unknown-key rule is reached.
        self._assert_invalid("""
            dataset_type: foo
            file_config:
                path: test
                foo: test
            """)

    def test_file_unknown_config(self):
        self._assert_invalid("""
            dataset_type: file
            foo_config:
                path: test
            """)

    def test_file_config_missing(self):
        self._assert_invalid("""
            dataset_type: file
            """)

    def test_file_config_missing_value(self):
        self._assert_invalid("""
            dataset_type: file
            file_config:
            """)

    def test_file_config_unknown(self):
        self._assert_invalid("""
            dataset_type: file
            file_config:
                foo: test
            """)

    # --- sql dataset configs --------------------------------------------

    def test_sql_config(self):
        self._assert_valid("""
            dataset_type: sql
            sql_config:
                connection: test
                query: test
            """)

    def test_sql_missing_config(self):
        self._assert_invalid("""
            dataset_type: sql
            """)

    def test_sql_unknown_config(self):
        self._assert_invalid("""
            dataset_type: sql
            foo_config:
                connection: test
                query: test
            """)

    def test_sql_config_missing_connection(self):
        self._assert_invalid("""
            dataset_type: sql
            sql_config:
                query: test
            """)

    def test_sql_config_missing_query(self):
        self._assert_invalid("""
            dataset_type: sql
            sql_config:
                connection: test
            """)

    # --- imageset configs -----------------------------------------------

    def test_image_config(self):
        self._assert_valid("""
            imageset_type: file
            file_config:
                paths:
                    - image.jpg
                    - a/directory/path
            collection_id: 5ad3a99b75f3b30001732f36
            dataset_id: 5ad3a99b75f3b30001732f36
            dataset_column: foo
            """)

    def test_image_config_file_config_missing(self):
        self._assert_invalid("""
            imageset_type: file
            """)

    def test_image_config_invalid_type(self):
        self._assert_invalid("""
            imageset_type: foo
            file_config:
                paths:
                    - image.jpg
            collection_id: 5ad3a99b75f3b30001732f36
            dataset_id: 5ad3a99b75f3b30001732f36
            dataset_column: foo
            """)

    def test_image_config_invalid_paths(self):
        # Paths must be strings; a bare integer entry should be rejected.
        self._assert_invalid("""
            imageset_type: file
            file_config:
                paths:
                    - 123
            collection_id: 5ad3a99b75f3b30001732f36
            dataset_id: 5ad3a99b75f3b30001732f36
            dataset_column: foo
            """)

    def test_image_config_missing_config(self):
        self._assert_invalid("""
            imageset_type: file
            collection_id: 5ad3a99b75f3b30001732f36
            dataset_id: 5ad3a99b75f3b30001732f36
            dataset_column: foo
            """)

    def test_image_config_missing_collection(self):
        self._assert_invalid("""
            imageset_type: file
            file_config:
                paths:
                    - image.jpg
            dataset_id: 5ad3a99b75f3b30001732f36
            dataset_column: foo
            """)

    def test_image_config_missing_dataset(self):
        self._assert_invalid("""
            imageset_type: file
            file_config:
                paths:
                    - image.jpg
                    - a/directory/path
            collection_id: 5ad3a99b75f3b30001732f36
            dataset_column: foo
            """)

    def test_image_config_missing_dataset_column(self):
        self._assert_invalid("""
            imageset_type: file
            file_config:
                paths:
                    - image.jpg
                    - a/directory/path
            collection_id: 5ad3a99b75f3b30001732f36
            dataset_id: 5ad3a99b75f3b30001732f36
            """)

    # --- collection update configs --------------------------------------

    def test_collection_publish(self):
        self._assert_valid("""
            update_type: publish
            publish_config:
                publish: true
                destination_project: foo
            """)

    def test_collection_update_unknown(self):
        self._assert_invalid("""
            update_type: foo
            publish_config:
                publish: true
                destination_project: foo
            """)

    def test_collection_publish_config_missing(self):
        self._assert_invalid("""
            update_type: publish
            """)

    def test_collection_publish_publish_missing(self):
        self._assert_invalid("""
            update_type: publish
            publish_config:
                destination_project: foo
            """)

    def test_collection_publish_destination_missing(self):
        self._assert_invalid("""
            update_type: publish
            publish_config:
                publish: true
            """)

    def test_collection_publish_incorrect_publish_type(self):
        # 'publish' must be a boolean, not the string 'foo'.
        self._assert_invalid("""
            update_type: publish
            publish_config:
                publish: foo
                destination_project: foo
            """)

    def test_collection_publish_incorrect_destination_type(self):
        # 'destination_project' must be a string, not an integer.
        self._assert_invalid("""
            update_type: publish
            publish_config:
                publish: true
                destination_project: 123
            """)
from unittest.mock import patch, mock_open
from jsonschema import exceptions
from .. import (
config
)
class TestValidateConfig(unittest.TestCase):
def _get_configuration(self, data):
with patch('builtins.open', mock_open(read_data=data)):
return config.load_config('foo')
def test_file_upload_path(self):
config_data = """
dataset_type: file
file_config:
path: test
"""
configuration = self._get_configuration(config_data)
try:
config.validate_config(configuration)
except exceptions.ValidationError:
self.fail('Failed validation')
def test_file_upload_directory(self):
config_data = """
dataset_type: file
file_config:
directory: test
"""
configuration = self._get_configuration(config_data)
try:
config.validate_config(configuration)
except exceptions.ValidationError:
self.fail('Failed validation')
def test_unknown_configuration(self):
config_data = """
foo: bar
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_unknown_dataset_type(self):
config_data = """
dataset_type: foo
file_config:
path: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_file_dataset_multiple_valid(self):
config_data = """
dataset_type: foo
file_config:
path: test
directory: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_file_dataset_multiple_invalid(self):
config_data = """
dataset_type: foo
file_config:
path: test
foo: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_file_unknown_config(self):
config_data = """
dataset_type: file
foo_config:
path: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_file_config_missing(self):
config_data = """
dataset_type: file
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_file_config_missing_value(self):
config_data = """
dataset_type: file
file_config:
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_file_config_unknown(self):
config_data = """
dataset_type: file
file_config:
foo: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_sql_config(self):
config_data = """
dataset_type: sql
sql_config:
connection: test
query: test
"""
configuration = self._get_configuration(config_data)
try:
config.validate_config(configuration)
except exceptions.ValidationError:
self.fail('Failed validation')
def test_sql_missing_config(self):
config_data = """
dataset_type: sql
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_sql_unknown_config(self):
config_data = """
dataset_type: sql
foo_config:
connection: test
query: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_sql_config_missing_connection(self):
config_data = """
dataset_type: sql
sql_config:
query: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_sql_config_missing_query(self):
config_data = """
dataset_type: sql
sql_config:
connection: test
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config(self):
config_data = """
imageset_type: file
file_config:
paths:
- image.jpg
- a/directory/path
collection_id: 5ad3a99b75f3b30001732f36
dataset_id: 5ad3a99b75f3b30001732f36
dataset_column: foo
"""
configuration = self._get_configuration(config_data)
try:
config.validate_config(configuration)
except exceptions.ValidationError:
self.fail('Failed validation')
def test_image_config_file_config_missing(self):
config_data = """
imageset_type: file
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config_invalid_type(self):
config_data = """
imageset_type: foo
file_config:
paths:
- image.jpg
collection_id: 5ad3a99b75f3b30001732f36
dataset_id: 5ad3a99b75f3b30001732f36
dataset_column: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config_invalid_paths(self):
config_data = """
imageset_type: file
file_config:
paths:
- 123
collection_id: 5ad3a99b75f3b30001732f36
dataset_id: 5ad3a99b75f3b30001732f36
dataset_column: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config_missing_config(self):
config_data = """
imageset_type: file
collection_id: 5ad3a99b75f3b30001732f36
dataset_id: 5ad3a99b75f3b30001732f36
dataset_column: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config_missing_collection(self):
config_data = """
imageset_type: file
file_config:
paths:
- image.jpg
dataset_id: 5ad3a99b75f3b30001732f36
dataset_column: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config_missing_dataset(self):
config_data = """
imageset_type: file
file_config:
paths:
- image.jpg
- a/directory/path
collection_id: 5ad3a99b75f3b30001732f36
dataset_column: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_image_config_missing_dataset_column(self):
config_data = """
imageset_type: file
file_config:
paths:
- image.jpg
- a/directory/path
collection_id: 5ad3a99b75f3b30001732f36
dataset_id: 5ad3a99b75f3b30001732f36
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_collection_publish(self):
config_data = """
update_type: publish
publish_config:
publish: true
destination_project: foo
"""
configuration = self._get_configuration(config_data)
try:
config.validate_config(configuration)
except exceptions.ValidationError:
self.fail('Failed validation')
def test_collection_update_unknown(self):
config_data = """
update_type: foo
publish_config:
publish: true
destination_project: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_collection_publish_config_missing(self):
config_data = """
update_type: publish
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_collection_publish_publish_missing(self):
config_data = """
update_type: publish
publish_config:
destination_project: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_collection_publish_destination_missing(self):
config_data = """
update_type: publish
publish_config:
publish: true
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_collection_publish_incorrect_publish_type(self):
config_data = """
update_type: publish
publish_config:
publish: foo
destination_project: foo
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration)
def test_collection_publish_incorrect_destination_type(self):
config_data = """
update_type: publish
publish_config:
publish: true
destination_project: 123
"""
configuration = self._get_configuration(config_data)
with self.assertRaises(exceptions.ValidationError):
config.validate_config(configuration) | 0.553023 | 0.321953 |
# http://msdn.microsoft.com/en-us/library/aa930622.aspx
def struct_BITMAPINFOHEADER(stream, offset, max_size, parent, name,
                            height_div_2=False):
    """Parse a BITMAPINFOHEADER structure and annotate suspicious values.

    :param height_div_2: when True, the stored height covers two stacked
        images (e.g. XOR/AND masks in .ico files), so it is halved before
        the sanity checks.
    :return: the populated C.STRUCT result with notes/warnings attached and
        a human-readable ``format_details`` summary string.
    """
    import C
    result = C.STRUCT(
        stream, offset, max_size, parent, name,
        'BITMAPINFOHEADER',
        ('Size', C.DWORD),
        ('Width', C.LONG),
        ('Height', C.LONG),
        ('Planes', C.WORD),
        ('BitCount', C.WORD),
        ('Compression', C.DWORD),
        ('SizeImage', C.DWORD),
        ('XPelsPerMeter', C.INT),
        ('YPelsPerMeter', C.INT),
        ('ClrUsed', C.DWORD),
        ('ClrImportant', C.DWORD),
    )
    w = result._Width.value
    h = result._Height.value
    details = [
        '%dx%d' % (w, h),
        '%d bit' % (result._BitCount.value),
    ]
    if w <= 0:
        result._Width.warnings.append('expected value larger than 0')
    if h <= 0:
        # A negative height is the documented way to store a top-down bitmap.
        result._Height.notes.append('image is top-down')
        h = -h
    if height_div_2:
        # BUG FIX: was `h /= 2`, which yields a float on Python 3 and makes
        # the '%X' conversion below raise TypeError; use integer division.
        h //= 2
        result._Height.notes.append(
            'image divided into 2 * 0x%X|%d' % (h, h))
    if w > 0 and h > 0:
        # NOTE(review): these two warnings are attached to `result` rather
        # than to the individual Width/Height fields as elsewhere — confirm
        # that is intentional before changing it.
        if w > 10000:
            result.warnings.append('value is large')
        if h > 10000:
            result.warnings.append('value is large')
        if w * h > 0xFFFFFFFF:
            result.warnings.append('W*H overflows => 0x%X`%08X|%d' %
                (w * h >> 32, w * h & 0xFFFFFFFF, w * h & 0xFFFFFFFF))
        elif w * h > 0x7FFFFFFF:
            result.warnings.append('W*H overflows (signed) => 0x%X`%08X|%d' %
                (w * h >> 31, w * h & 0x7FFFFFFF, w * h & 0x7FFFFFFF))
        elif w * h > 0x01000000:
            result.warnings.append('W*H is large => 0x%X|%d' % (w * h, w * h))
        else:
            result._Width.notes.append('W*H => 0x%X|%d' % (w * h, w * h))
    if result._Planes.value > 100:
        result._Planes.warnings.append('value is large, expected value to be 1')
    elif result._Planes.value != 1:
        result._Planes.warnings.append('expected value to be 1')
    if result._BitCount.value not in [1, 4, 8, 16, 24, 32]:
        result._BitCount.warnings.append(
            'Unusual value; expected 1, 4, 8, 16, 24 or 32')
    compression_methods = {  # description, BitCount limitations
        0: ('uncompressed', None),
        1: ('8 bit RLE', [8]),
        2: ('4 bit RLE', [4]),
        3: ('bitfield', [16, 32]),
        4: ('JPEG', None),
        5: ('PNG', None),
    }
    if result._Compression.value not in compression_methods:
        result._Compression.warnings.append('unknown compression method')
        details.append('unknown compression')
    else:
        description, valid_bit_counts = \
            compression_methods[result._Compression.value]
        details.append(description)
        result._Compression.notes.append(description)
        if valid_bit_counts is not None \
                and result._BitCount.value not in valid_bit_counts:
            result._Compression.warnings.append(
                'invalid for %d bits per pixel' % result._BitCount.value)
    if result._SizeImage.value > 0x010000000:
        # Integer division keeps the '%d' operand an int.
        result._SizeImage.warnings.append(
            'image is large: %dMb' % (result._SizeImage.value // 0x100000))
    if result._XPelsPerMeter.value < 0:
        result._XPelsPerMeter.warnings.append('expected positive value or 0')
    if result._YPelsPerMeter.value < 0:
        result._YPelsPerMeter.warnings.append('expected positive value or 0')
    max_number_of_colors = 2 ** result._BitCount.value
    if result._ClrUsed.value > max_number_of_colors:
        result._ClrUsed.warnings.append('expected value < 0x%X|%d' %
            (max_number_of_colors, max_number_of_colors))
    if result._ClrImportant.value > result._ClrUsed.value:
        result._ClrImportant.warnings.append('expected value < 0x%X|%d' %
            (result._ClrUsed.value, result._ClrUsed.value))
    result.format_details = ', '.join(details)
    return result
# http://msdn.microsoft.com/en-us/library/aa930622.aspx
def struct_BITMAPINFOHEADER(stream, offset, max_size, parent, name,
                            height_div_2=False):
    """Parse a BITMAPINFOHEADER structure and annotate suspicious values.

    :param height_div_2: when True, the stored height covers two stacked
        images (e.g. XOR/AND masks in .ico files), so it is halved before
        the sanity checks.
    :return: the populated C.STRUCT result with notes/warnings attached and
        a human-readable ``format_details`` summary string.
    """
    import C
    result = C.STRUCT(
        stream, offset, max_size, parent, name,
        'BITMAPINFOHEADER',
        ('Size', C.DWORD),
        ('Width', C.LONG),
        ('Height', C.LONG),
        ('Planes', C.WORD),
        ('BitCount', C.WORD),
        ('Compression', C.DWORD),
        ('SizeImage', C.DWORD),
        ('XPelsPerMeter', C.INT),
        ('YPelsPerMeter', C.INT),
        ('ClrUsed', C.DWORD),
        ('ClrImportant', C.DWORD),
    )
    w = result._Width.value
    h = result._Height.value
    details = [
        '%dx%d' % (w, h),
        '%d bit' % (result._BitCount.value),
    ]
    if w <= 0:
        result._Width.warnings.append('expected value larger than 0')
    if h <= 0:
        # A negative height is the documented way to store a top-down bitmap.
        result._Height.notes.append('image is top-down')
        h = -h
    if height_div_2:
        # BUG FIX: was `h /= 2`, which yields a float on Python 3 and makes
        # the '%X' conversion below raise TypeError; use integer division.
        h //= 2
        result._Height.notes.append(
            'image divided into 2 * 0x%X|%d' % (h, h))
    if w > 0 and h > 0:
        # NOTE(review): these two warnings are attached to `result` rather
        # than to the individual Width/Height fields as elsewhere — confirm
        # that is intentional before changing it.
        if w > 10000:
            result.warnings.append('value is large')
        if h > 10000:
            result.warnings.append('value is large')
        if w * h > 0xFFFFFFFF:
            result.warnings.append('W*H overflows => 0x%X`%08X|%d' %
                (w * h >> 32, w * h & 0xFFFFFFFF, w * h & 0xFFFFFFFF))
        elif w * h > 0x7FFFFFFF:
            result.warnings.append('W*H overflows (signed) => 0x%X`%08X|%d' %
                (w * h >> 31, w * h & 0x7FFFFFFF, w * h & 0x7FFFFFFF))
        elif w * h > 0x01000000:
            result.warnings.append('W*H is large => 0x%X|%d' % (w * h, w * h))
        else:
            result._Width.notes.append('W*H => 0x%X|%d' % (w * h, w * h))
    if result._Planes.value > 100:
        result._Planes.warnings.append('value is large, expected value to be 1')
    elif result._Planes.value != 1:
        result._Planes.warnings.append('expected value to be 1')
    if result._BitCount.value not in [1, 4, 8, 16, 24, 32]:
        result._BitCount.warnings.append(
            'Unusual value; expected 1, 4, 8, 16, 24 or 32')
    compression_methods = {  # description, BitCount limitations
        0: ('uncompressed', None),
        1: ('8 bit RLE', [8]),
        2: ('4 bit RLE', [4]),
        3: ('bitfield', [16, 32]),
        4: ('JPEG', None),
        5: ('PNG', None),
    }
    if result._Compression.value not in compression_methods:
        result._Compression.warnings.append('unknown compression method')
        details.append('unknown compression')
    else:
        description, valid_bit_counts = \
            compression_methods[result._Compression.value]
        details.append(description)
        result._Compression.notes.append(description)
        if valid_bit_counts is not None \
                and result._BitCount.value not in valid_bit_counts:
            result._Compression.warnings.append(
                'invalid for %d bits per pixel' % result._BitCount.value)
    if result._SizeImage.value > 0x010000000:
        # Integer division keeps the '%d' operand an int.
        result._SizeImage.warnings.append(
            'image is large: %dMb' % (result._SizeImage.value // 0x100000))
    if result._XPelsPerMeter.value < 0:
        result._XPelsPerMeter.warnings.append('expected positive value or 0')
    if result._YPelsPerMeter.value < 0:
        result._YPelsPerMeter.warnings.append('expected positive value or 0')
    max_number_of_colors = 2 ** result._BitCount.value
    if result._ClrUsed.value > max_number_of_colors:
        result._ClrUsed.warnings.append('expected value < 0x%X|%d' %
            (max_number_of_colors, max_number_of_colors))
    if result._ClrImportant.value > result._ClrUsed.value:
        result._ClrImportant.warnings.append('expected value < 0x%X|%d' %
            (result._ClrUsed.value, result._ClrUsed.value))
    result.format_details = ', '.join(details)
    return result
__author__ = "<NAME> as part of research at imaal.byu.edu"
from scapy.all import sr1
from scapy.layers.inet import IP, TCP
from scapy.layers.inet6 import IPv6
import argparse
import multiprocessing as mp
from tqdm import tqdm
import os
import json
port = 53
ip6_src = None
TARGET = "target"
RESULT = "result"
json_keys = [TARGET, RESULT]
def query(ip):
    """
    queries an IP to see if TCP Fast Open option is set in SYN ACK
    :param ip: the ip to query. Uses module-level `port` / `ip6_src` globals
    :return: a dict {"target": ip, "result": True/False/"Timeout"/"Can't resolve"}
    """
    ip = ip.strip('\n')
    json_response = {key: None for key in json_keys}
    json_response[TARGET] = ip
    # sr1 - get single response, flags="S" - send SYN, options TFO - set fast open in options
    try:
        ip_layer = IP(dst=ip) if ":" not in ip else IPv6(dst=ip, src=ip6_src)
        res = sr1(ip_layer / TCP(dport=port, flags="S", options=[('TFO', '')]), timeout=5, verbose=False)
        if res is None:
            json_response[RESULT] = "Timeout"
        else:
            json_response[RESULT] = ('TFO' in dict(res[1].options))  # check if TFO is set in TCP response options
    except Exception as e:
        # Best-effort scan: record the failure for this target and keep going.
        print(e)
        print(ip)
        json_response[RESULT] = "Can't resolve"
    # BUG FIX: the return used to live in a `finally` block, which silently
    # swallows any in-flight exception (including KeyboardInterrupt during a
    # pool shutdown). Return normally instead.
    return json_response
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Running a series of scapy scans on a list of IPs to look for TFO")
    parser.add_argument('input', help="Input file containing a list of IPs")
    parser.add_argument('output', help="File to write results to", default='TFO_output.txt')
    parser.add_argument('-p', '--port', help="The port to run the scans on", default=53, type=int)
    parser.add_argument('-n', '--num-threads', help="Number of threads to execute queries", default=64, type=int)
    parser.add_argument('-6', '--ip6_src', help="Specifies the source address for ipv6 since scapy doesn't autofill")
    args = parser.parse_args()
    # Read targets; drop a header line if the file does not start with a digit.
    # (Guarding on `ips` also avoids an IndexError on an empty input file.)
    with open(args.input) as ip_file:
        ips = ip_file.readlines()
    if ips and not ips[0][0].isdecimal():
        ips = ips[1:]
    threads = min(args.num_threads, len(ips))
    port = args.port
    ip6_src = args.ip6_src
    # BUG FIX: removed a second, never-closed `open(args.output, 'w')` handle
    # ("summary") and an unused `results` list; the single `with` below owns
    # the output file.
    print("Beginning the {} queries using {} threads. ".format(len(ips), threads))
    with open(args.output, 'w') as output_file:
        with mp.Pool(processes=threads) as p:
            try:
                for result in tqdm(p.imap_unordered(query, ips), total=len(ips)):
                    output_file.write(json.dumps(result) + '\n')
            except KeyboardInterrupt:
                p.terminate()
                p.join()
                print("Exiting early from queries. Current results will still be written")
    print("Queries finished. Writing results")
    os.chmod(args.output, 0o777)  # since script runs privileged, change file to be user writeable
from scapy.all import sr1
from scapy.layers.inet import IP, TCP
from scapy.layers.inet6 import IPv6
import argparse
import multiprocessing as mp
from tqdm import tqdm
import os
import json
port = 53
ip6_src = None
TARGET = "target"
RESULT = "result"
json_keys = [TARGET, RESULT]
def query(ip):
    """
    queries an IP to see if TCP Fast Open option is set in SYN ACK
    :param ip: the ip to query. Uses module-level `port` / `ip6_src` globals
    :return: a dict {"target": ip, "result": True/False/"Timeout"/"Can't resolve"}
    """
    ip = ip.strip('\n')
    json_response = {key: None for key in json_keys}
    json_response[TARGET] = ip
    # sr1 - get single response, flags="S" - send SYN, options TFO - set fast open in options
    try:
        ip_layer = IP(dst=ip) if ":" not in ip else IPv6(dst=ip, src=ip6_src)
        res = sr1(ip_layer / TCP(dport=port, flags="S", options=[('TFO', '')]), timeout=5, verbose=False)
        if res is None:
            json_response[RESULT] = "Timeout"
        else:
            json_response[RESULT] = ('TFO' in dict(res[1].options))  # check if TFO is set in TCP response options
    except Exception as e:
        # Best-effort scan: record the failure for this target and keep going.
        print(e)
        print(ip)
        json_response[RESULT] = "Can't resolve"
    # BUG FIX: the return used to live in a `finally` block, which silently
    # swallows any in-flight exception (including KeyboardInterrupt during a
    # pool shutdown). Return normally instead.
    return json_response
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Running a series of scapy scans on a list of IPs to look for TFO")
    parser.add_argument('input', help="Input file containing a list of IPs")
    parser.add_argument('output', help="File to write results to", default='TFO_output.txt')
    parser.add_argument('-p', '--port', help="The port to run the scans on", default=53, type=int)
    parser.add_argument('-n', '--num-threads', help="Number of threads to execute queries", default=64, type=int)
    parser.add_argument('-6', '--ip6_src', help="Specifies the source address for ipv6 since scapy doesn't autofill")
    args = parser.parse_args()
    # Read targets; drop a header line if the file does not start with a digit.
    # (Guarding on `ips` also avoids an IndexError on an empty input file.)
    with open(args.input) as ip_file:
        ips = ip_file.readlines()
    if ips and not ips[0][0].isdecimal():
        ips = ips[1:]
    threads = min(args.num_threads, len(ips))
    port = args.port
    ip6_src = args.ip6_src
    # BUG FIX: removed a second, never-closed `open(args.output, 'w')` handle
    # ("summary") and an unused `results` list; the single `with` below owns
    # the output file.
    print("Beginning the {} queries using {} threads. ".format(len(ips), threads))
    with open(args.output, 'w') as output_file:
        with mp.Pool(processes=threads) as p:
            try:
                for result in tqdm(p.imap_unordered(query, ips), total=len(ips)):
                    output_file.write(json.dumps(result) + '\n')
            except KeyboardInterrupt:
                p.terminate()
                p.join()
                print("Exiting early from queries. Current results will still be written")
    print("Queries finished. Writing results")
    os.chmod(args.output, 0o777)  # since script runs privileged, change file to be user writeable
import argparse
import os
import utils.utils as utils
def get_configurations(parser=None):
    """Build the experiment configuration and parse command-line overrides.

    Every hyper-parameter below is a hard-coded default for the current
    experiment; all of them can still be overridden on the command line.

    :param parser: optional argparse.ArgumentParser to extend; a new one is
        created when None.
    :return: parsed argparse.Namespace with all options.
    """
    # set configurations here
    experiment_name = 'on_white_II_waterfall'  # write here the name of the experiment
    experiments_dir_name = os.path.join('experiments', experiment_name)
    main_style_image_name = 'on_white_II'
    tuning_blocks_style_image_name = 'waterfall'
    tuning_blocks_lower_style_image_name = 'waterfall'
    tuning_blocks_higher_style_image_name = 'waterfall'
    main_epochs = 2  # 2
    tuning_blocks_epochs = 2  # 2
    batch_size = 4
    learning_rate_main = 1e-3
    learning_rate_blocks = 1e-4
    # NOTE: "wight" is a long-standing typo for "weight"; kept because the
    # CLI flag names are part of the public interface.
    main_content_wight = 1
    main_style_wight = 1e5
    network_version = 'normal'  # 'normal' \ 'dual'
    blocks_content_wight = 1  # set for network_version = 'normal'
    blocks_style_wight = 1e7  # set for network_version = 'normal'
    blocks_lower_content_wight = 1  # set for network_version = 'dual'
    blocks_lower_style_wight = 1e5  # set for network_version = 'dual'
    blocks_higher_content_wight = 1  # set for network_version = 'dual'
    blocks_higher_style_wight = 1e5  # set for network_version = 'dual'
    image_size = 256
    vgg_output = True
    main_style_size = None
    blocks_style_size = None
    style_wight0 = 1
    style_wight1 = 1
    style_wight2 = 1
    style_wight3 = 1
    style_wight4 = 1
    training_scheme = 'all'  # all, only_main, only_tuning_blocks, only_tuning_blocks_lower, only_tuning_blocks_higher
    checkpoint_iter = 5000
    eval_iter = 1000
    intermediate_images_iter = 500
    current_batch_eval_iter = 100
    train_data_path = 'Path to COCO2014_train'
    val_data_path = 'Path to COCO2014_val'
    model_top_params = 'main_%d_blocks_%d' % (main_style_wight, blocks_style_wight)
    checkpoint_dir = os.path.join(experiments_dir_name, 'checkpoints')
    model_save_dir = os.path.join(experiments_dir_name, 'model_dir')
    images_save_dir = os.path.join(experiments_dir_name, 'images')
    main_style_image_path = os.path.join('images', 'style_images', main_style_image_name + '.jpg')
    tuning_blocks_lower_style_image_path = os.path.join('images', 'style_images', tuning_blocks_lower_style_image_name + '.jpg')
    tuning_blocks_higher_style_image_path = os.path.join('images', 'style_images', tuning_blocks_higher_style_image_name + '.jpg')
    tuning_blocks_style_image_path = os.path.join('images', 'style_images', tuning_blocks_style_image_name + '.jpg')
    evaluation_images_path = os.path.join('images', 'evaluation_images')
    pre_trained_main_model = os.path.join(model_save_dir, 'orginal_main_latest.pth')
    pre_trained_tuning_blocks_lower = os.path.join(model_save_dir, 'tuning_blocks_lower.pth')
    pre_trained_tuning_blocks_higher = os.path.join(model_save_dir, 'tuning_blocks_higher.pth')
    # set parser
    if parser is None:
        parser = argparse.ArgumentParser()
    parser.add_argument('--main_style_image_name', default=main_style_image_name)
    parser.add_argument('--main_epochs', default=main_epochs, type=int)
    parser.add_argument('--tuning_blocks_epochs', default=tuning_blocks_epochs, type=int)
    parser.add_argument('--batch_size', default=batch_size, type=int)
    parser.add_argument('--image_size', default=image_size, type=int)
    parser.add_argument('--style_size', default=main_style_size, type=int)
    parser.add_argument('--blocks_style_size', default=blocks_style_size, type=int)
    parser.add_argument('--learning_rate_main', default=learning_rate_main, type=float)
    parser.add_argument('--learning_rate_blocks', default=learning_rate_blocks, type=float)
    parser.add_argument('--main_content_wight', default=main_content_wight, type=float)
    parser.add_argument('--main_style_wight', default=main_style_wight, type=float)
    parser.add_argument('--checkpoint_iter', default=checkpoint_iter, type=int)
    parser.add_argument('--eval_iter', default=eval_iter, type=int)
    parser.add_argument('--intermediate_images_iter', default=intermediate_images_iter, type=int)
    parser.add_argument('--current_batch_eval_iter', default=current_batch_eval_iter, type=int)
    parser.add_argument('--train_data_path', default=train_data_path)
    parser.add_argument('--val_data_path', default=val_data_path)
    parser.add_argument('--model_name', default=model_top_params)
    parser.add_argument('--experiments_dir_name', default=experiments_dir_name)
    parser.add_argument('--checkpoint_dir', default=checkpoint_dir)
    parser.add_argument('--model_save_dir', default=model_save_dir)
    parser.add_argument('--images_save_dir', default=images_save_dir)
    parser.add_argument('--pre_trained_main_model', default=pre_trained_main_model)
    parser.add_argument('--main_style_image_path', default=main_style_image_path)
    parser.add_argument('--evaluation_images_path', default=evaluation_images_path)
    parser.add_argument('--vgg_output', default=vgg_output, type=lambda x: bool(utils.str2bool(x)))
    parser.add_argument('--style_wight0', default=style_wight0, type=float)
    parser.add_argument('--style_wight1', default=style_wight1, type=float)
    parser.add_argument('--style_wight2', default=style_wight2, type=float)
    parser.add_argument('--style_wight3', default=style_wight3, type=float)
    parser.add_argument('--style_wight4', default=style_wight4, type=float)
    parser.add_argument('--training_scheme', default=training_scheme)
    parser.add_argument('--network_version', default=network_version)
    # BUG FIX: was `network_version is 'dual'` / `is 'normal'` — identity
    # comparison with a string literal relies on CPython interning and emits
    # SyntaxWarning on Python 3.8+; use value equality.
    if network_version == 'dual':
        parser.add_argument('--blocks_lower_content_wight', default=blocks_lower_content_wight, type=float)
        parser.add_argument('--blocks_lower_style_wight', default=blocks_lower_style_wight, type=float)
        parser.add_argument('--blocks_higher_content_wight', default=blocks_higher_content_wight, type=float)
        parser.add_argument('--blocks_higher_style_wight', default=blocks_higher_style_wight, type=float)
        parser.add_argument('--tuning_blocks_lower_style_image_name', default=tuning_blocks_lower_style_image_name)
        parser.add_argument('--tuning_blocks_higher_style_image_name', default=tuning_blocks_higher_style_image_name)
        parser.add_argument('--tuning_blocks_lower_style_image_path', default=tuning_blocks_lower_style_image_path)
        parser.add_argument('--tuning_blocks_higher_style_image_path', default=tuning_blocks_higher_style_image_path)
        parser.add_argument('--pre_trained_tuning_blocks_lower', default=pre_trained_tuning_blocks_lower)
        parser.add_argument('--pre_trained_tuning_blocks_higher', default=pre_trained_tuning_blocks_higher)
    elif network_version == 'normal':
        parser.add_argument('--blocks_content_wight', default=blocks_content_wight, type=float)
        parser.add_argument('--blocks_style_wight', default=blocks_style_wight, type=float)
        parser.add_argument('--block_style_image_name', default=tuning_blocks_style_image_name)
        parser.add_argument('--tuning_blocks_style_image_path', default=tuning_blocks_style_image_path)
    opt = parser.parse_args()
    return opt
import os
import utils.utils as utils
def get_configurations(parser=None):
    """Build the experiment configuration and parse command-line overrides.

    Every hyper-parameter below is a hard-coded default for the current
    experiment; all of them can still be overridden on the command line.

    :param parser: optional argparse.ArgumentParser to extend; a new one is
        created when None.
    :return: parsed argparse.Namespace with all options.
    """
    # set configurations here
    experiment_name = 'on_white_II_waterfall'  # write here the name of the experiment
    experiments_dir_name = os.path.join('experiments', experiment_name)
    main_style_image_name = 'on_white_II'
    tuning_blocks_style_image_name = 'waterfall'
    tuning_blocks_lower_style_image_name = 'waterfall'
    tuning_blocks_higher_style_image_name = 'waterfall'
    main_epochs = 2  # 2
    tuning_blocks_epochs = 2  # 2
    batch_size = 4
    learning_rate_main = 1e-3
    learning_rate_blocks = 1e-4
    # NOTE: "wight" is a long-standing typo for "weight"; kept because the
    # CLI flag names are part of the public interface.
    main_content_wight = 1
    main_style_wight = 1e5
    network_version = 'normal'  # 'normal' \ 'dual'
    blocks_content_wight = 1  # set for network_version = 'normal'
    blocks_style_wight = 1e7  # set for network_version = 'normal'
    blocks_lower_content_wight = 1  # set for network_version = 'dual'
    blocks_lower_style_wight = 1e5  # set for network_version = 'dual'
    blocks_higher_content_wight = 1  # set for network_version = 'dual'
    blocks_higher_style_wight = 1e5  # set for network_version = 'dual'
    image_size = 256
    vgg_output = True
    main_style_size = None
    blocks_style_size = None
    style_wight0 = 1
    style_wight1 = 1
    style_wight2 = 1
    style_wight3 = 1
    style_wight4 = 1
    training_scheme = 'all'  # all, only_main, only_tuning_blocks, only_tuning_blocks_lower, only_tuning_blocks_higher
    checkpoint_iter = 5000
    eval_iter = 1000
    intermediate_images_iter = 500
    current_batch_eval_iter = 100
    train_data_path = 'Path to COCO2014_train'
    val_data_path = 'Path to COCO2014_val'
    model_top_params = 'main_%d_blocks_%d' % (main_style_wight, blocks_style_wight)
    checkpoint_dir = os.path.join(experiments_dir_name, 'checkpoints')
    model_save_dir = os.path.join(experiments_dir_name, 'model_dir')
    images_save_dir = os.path.join(experiments_dir_name, 'images')
    main_style_image_path = os.path.join('images', 'style_images', main_style_image_name + '.jpg')
    tuning_blocks_lower_style_image_path = os.path.join('images', 'style_images', tuning_blocks_lower_style_image_name + '.jpg')
    tuning_blocks_higher_style_image_path = os.path.join('images', 'style_images', tuning_blocks_higher_style_image_name + '.jpg')
    tuning_blocks_style_image_path = os.path.join('images', 'style_images', tuning_blocks_style_image_name + '.jpg')
    evaluation_images_path = os.path.join('images', 'evaluation_images')
    pre_trained_main_model = os.path.join(model_save_dir, 'orginal_main_latest.pth')
    pre_trained_tuning_blocks_lower = os.path.join(model_save_dir, 'tuning_blocks_lower.pth')
    pre_trained_tuning_blocks_higher = os.path.join(model_save_dir, 'tuning_blocks_higher.pth')
    # set parser
    if parser is None:
        parser = argparse.ArgumentParser()
    parser.add_argument('--main_style_image_name', default=main_style_image_name)
    parser.add_argument('--main_epochs', default=main_epochs, type=int)
    parser.add_argument('--tuning_blocks_epochs', default=tuning_blocks_epochs, type=int)
    parser.add_argument('--batch_size', default=batch_size, type=int)
    parser.add_argument('--image_size', default=image_size, type=int)
    parser.add_argument('--style_size', default=main_style_size, type=int)
    parser.add_argument('--blocks_style_size', default=blocks_style_size, type=int)
    parser.add_argument('--learning_rate_main', default=learning_rate_main, type=float)
    parser.add_argument('--learning_rate_blocks', default=learning_rate_blocks, type=float)
    parser.add_argument('--main_content_wight', default=main_content_wight, type=float)
    parser.add_argument('--main_style_wight', default=main_style_wight, type=float)
    parser.add_argument('--checkpoint_iter', default=checkpoint_iter, type=int)
    parser.add_argument('--eval_iter', default=eval_iter, type=int)
    parser.add_argument('--intermediate_images_iter', default=intermediate_images_iter, type=int)
    parser.add_argument('--current_batch_eval_iter', default=current_batch_eval_iter, type=int)
    parser.add_argument('--train_data_path', default=train_data_path)
    parser.add_argument('--val_data_path', default=val_data_path)
    parser.add_argument('--model_name', default=model_top_params)
    parser.add_argument('--experiments_dir_name', default=experiments_dir_name)
    parser.add_argument('--checkpoint_dir', default=checkpoint_dir)
    parser.add_argument('--model_save_dir', default=model_save_dir)
    parser.add_argument('--images_save_dir', default=images_save_dir)
    parser.add_argument('--pre_trained_main_model', default=pre_trained_main_model)
    parser.add_argument('--main_style_image_path', default=main_style_image_path)
    parser.add_argument('--evaluation_images_path', default=evaluation_images_path)
    parser.add_argument('--vgg_output', default=vgg_output, type=lambda x: bool(utils.str2bool(x)))
    parser.add_argument('--style_wight0', default=style_wight0, type=float)
    parser.add_argument('--style_wight1', default=style_wight1, type=float)
    parser.add_argument('--style_wight2', default=style_wight2, type=float)
    parser.add_argument('--style_wight3', default=style_wight3, type=float)
    parser.add_argument('--style_wight4', default=style_wight4, type=float)
    parser.add_argument('--training_scheme', default=training_scheme)
    parser.add_argument('--network_version', default=network_version)
    # BUG FIX: was `network_version is 'dual'` / `is 'normal'` — identity
    # comparison with a string literal relies on CPython interning and emits
    # SyntaxWarning on Python 3.8+; use value equality.
    if network_version == 'dual':
        parser.add_argument('--blocks_lower_content_wight', default=blocks_lower_content_wight, type=float)
        parser.add_argument('--blocks_lower_style_wight', default=blocks_lower_style_wight, type=float)
        parser.add_argument('--blocks_higher_content_wight', default=blocks_higher_content_wight, type=float)
        parser.add_argument('--blocks_higher_style_wight', default=blocks_higher_style_wight, type=float)
        parser.add_argument('--tuning_blocks_lower_style_image_name', default=tuning_blocks_lower_style_image_name)
        parser.add_argument('--tuning_blocks_higher_style_image_name', default=tuning_blocks_higher_style_image_name)
        parser.add_argument('--tuning_blocks_lower_style_image_path', default=tuning_blocks_lower_style_image_path)
        parser.add_argument('--tuning_blocks_higher_style_image_path', default=tuning_blocks_higher_style_image_path)
        parser.add_argument('--pre_trained_tuning_blocks_lower', default=pre_trained_tuning_blocks_lower)
        parser.add_argument('--pre_trained_tuning_blocks_higher', default=pre_trained_tuning_blocks_higher)
    elif network_version == 'normal':
        parser.add_argument('--blocks_content_wight', default=blocks_content_wight, type=float)
        parser.add_argument('--blocks_style_wight', default=blocks_style_wight, type=float)
        parser.add_argument('--block_style_image_name', default=tuning_blocks_style_image_name)
        parser.add_argument('--tuning_blocks_style_image_path', default=tuning_blocks_style_image_path)
    opt = parser.parse_args()
    return opt
import argparse
import logging
import os
import subprocess
from copy import deepcopy
from decli import cli
from ..config.project_config import ProjectConfig
from ..config.run_config import RunConfig
from ..http.api.endpoints import app
from ..tasks.init import CreateProject
from ..tasks.run import StartWebserver
class Cli:
    """Top-level `windmill` command-line interface.

    Maps the `init` and `run` subcommands onto their task handlers via a
    decli CLI specification.
    """

    @staticmethod
    def get_cli_spec():
        """Return the decli specification dict for the `windmill` command."""
        return {
            "prog": "windmill",
            "description": "Drag'N'Drop web app to build and manage Airflow DAGs",
            "subcommands": {
                "title": "positional arguments",
                "description": "Run 'windmill <arg> --help' for further details",
                "commands": [
                    {
                        "name": "init",
                        "help": "Creates a new windmill project",
                        "func": Cli.init,
                        "arguments": ProjectConfig.to_cli_args(),
                    },
                    {
                        "name": "run",
                        "help": "Start Windmill server from a project folder",
                        "func": Cli.run_server,
                        "arguments": RunConfig.to_cli_args(),
                    },
                ],
            },
        }

    @classmethod
    def init(cls, *args, **kwargs):
        """Create a new windmill project from CLI args.

        Errors are logged, not raised; the exception object is returned so a
        caller can inspect the failure.
        """
        try:
            project = ProjectConfig.load(*args, **kwargs)
            CreateProject(project)
        except Exception as e:
            logging.error(f"Unable to create project ({e}) - aborting")
            return e

    @classmethod
    def run_server(cls, *args, **kwargs):
        """Start the windmill webserver; failures are logged and swallowed."""
        try:
            run_config = RunConfig.load(*args, **kwargs)
            StartWebserver(run_config)
        except Exception as e:
            logging.error(f"Unable to start webserver ({e}) - aborting")

    @staticmethod
    def run_cli():
        """Entry point: build the parser from the spec and dispatch."""
        return run_parser(get_parser(Cli.get_cli_spec()))
class DevCli:
    """Developer-only commands for working on Windmill itself."""

    @staticmethod
    def get_cli_spec():
        """Return the decli specification dict for the dev command set."""
        return {
            "prog": "windmill",
            "description": "Dev commands for working on Windmill",
            "subcommands": {
                "title": "positional arguments",
                "description": "Run 'windmill <arg> --help' for further details",
                "commands": [
                    {
                        "name": "start-backend",
                        "help": "Starts the backend flask server with CORS enabled",
                        "func": DevCli.start_backend,
                        "arguments": RunConfig.to_cli_args(),
                    },
                    {
                        "name": "start-frontend",
                        "help": "Starts the frontend react server using npm build",
                        "func": DevCli.start_frontend,
                        "arguments": [],
                    },
                ],
            },
        }

    @staticmethod
    def start_backend(*args, **kwargs):
        """Recreate a throwaway dev project and run the webserver in it.

        Side effects: deletes `.windmill-temp-project/` in the repo root
        (via `rm -rf`, so POSIX-only), re-creates it through the `windmill`
        CLI, and chdirs into it before starting the server.
        """
        try:
            wd = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
            print("Deleting existing windmill dev project")
            p = subprocess.Popen(
                ["rm", "-rf", ".windmill-temp-project/"], cwd=wd, stdout=subprocess.PIPE
            )
            p.communicate()
            print("Creating new project")
            p = subprocess.Popen(
                ["windmill", "init", "--name", ".windmill-temp-project"],
                cwd=wd,
                stdout=subprocess.PIPE,
            )
            p.communicate()
            print("Starting dev backend")
            # NOTE(review): process-wide chdir — subsequent relative paths in
            # this process resolve inside the temp project.
            os.chdir(
                os.path.abspath(
                    os.path.join(
                        os.path.dirname(__file__), "..", "..", ".windmill-temp-project/"
                    )
                )
            )
            run_config = RunConfig.load(run_dev_server=True, *args, **kwargs)
            StartWebserver(run_config)
        except Exception as e:
            logging.error(f"Unable to start webserver ({e}) - aborting")

    @staticmethod
    def start_frontend(**kwargs):
        """Launch the React dev server via `npm start` in the app directory."""
        wd = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "..", "http", "app")
        )
        with subprocess.Popen(["npm", "start"], cwd=wd, stdout=subprocess.PIPE):
            print("Running frontend on http://localhost:1234")

    @staticmethod
    def run_cli():
        """Entry point: build the dev parser from the spec and dispatch."""
        return run_parser(get_parser(DevCli.get_cli_spec()))
def get_parser(cli_spec) -> argparse.ArgumentParser:
    """Build an argparse parser from a decli spec dict.

    NOTE: mutates `cli_spec` in place to make every (sub)parser show
    argument defaults in its help output.
    """
    cli_spec["formatter_class"] = argparse.ArgumentDefaultsHelpFormatter
    return cli(cli_spec)
def run_parser(parser):
    """Parse CLI args and dispatch to the selected subcommand handler.

    Prints usage help when no subcommand (and thus no `func` default) was
    selected.
    """
    args = parser.parse_args()
    # BUG FIX: the call used to sit inside `except AttributeError`, which
    # also hid AttributeErrors raised *inside* the command handler itself.
    func = getattr(args, "func", None)
    if func is None:
        print(f"Error parsing args `{vars(args) or 'None'}`")
        parser.print_help()
    else:
        func(**vars(args))
import logging
import os
import subprocess
from copy import deepcopy
from decli import cli
from ..config.project_config import ProjectConfig
from ..config.run_config import RunConfig
from ..http.api.endpoints import app
from ..tasks.init import CreateProject
from ..tasks.run import StartWebserver
class Cli:
@staticmethod
def get_cli_spec():
return {
"prog": "windmill",
"description": "Drag'N'Drop web app to build and manage Airflow DAGs",
"subcommands": {
"title": "positional arguments",
"description": "Run 'windmill <arg> --help' for further details",
"commands": [
{
"name": "init",
"help": "Creates a new windmill project",
"func": Cli.init,
"arguments": ProjectConfig.to_cli_args(),
},
{
"name": "run",
"help": "Start Windmill server from a project folder",
"func": Cli.run_server,
"arguments": RunConfig.to_cli_args(),
},
],
},
}
@classmethod
def init(cls, *args, **kwargs):
try:
project = ProjectConfig.load(*args, **kwargs)
CreateProject(project)
except Exception as e:
logging.error(f"Unable to create project ({e}) - aborting")
return e
@classmethod
def run_server(cls, *args, **kwargs):
try:
run_config = RunConfig.load(*args, **kwargs)
StartWebserver(run_config)
except Exception as e:
logging.error(f"Unable to start webserver ({e}) - aborting")
@staticmethod
def run_cli():
return run_parser(get_parser(Cli.get_cli_spec()))
class DevCli:
@staticmethod
def get_cli_spec():
return {
"prog": "windmill",
"description": "Dev commands for working on Windmill",
"subcommands": {
"title": "positional arguments",
"description": "Run 'windmill <arg> --help' for further details",
"commands": [
{
"name": "start-backend",
"help": "Starts the backend flask server with CORS enabled",
"func": DevCli.start_backend,
"arguments": RunConfig.to_cli_args(),
},
{
"name": "start-frontend",
"help": "Starts the frontend react server using npm build",
"func": DevCli.start_frontend,
"arguments": [],
},
],
},
}
@staticmethod
def start_backend(*args, **kwargs):
try:
wd = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", ".."))
print("Deleting existing windmill dev project")
p = subprocess.Popen(
["rm", "-rf", ".windmill-temp-project/"], cwd=wd, stdout=subprocess.PIPE
)
p.communicate()
print("Creating new project")
p = subprocess.Popen(
["windmill", "init", "--name", ".windmill-temp-project"],
cwd=wd,
stdout=subprocess.PIPE,
)
p.communicate()
print("Starting dev backend")
os.chdir(
os.path.abspath(
os.path.join(
os.path.dirname(__file__), "..", "..", ".windmill-temp-project/"
)
)
)
run_config = RunConfig.load(run_dev_server=True, *args, **kwargs)
StartWebserver(run_config)
except Exception as e:
logging.error(f"Unable to start webserver ({e}) - aborting")
@staticmethod
def start_frontend(**kwargs):
wd = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "http", "app")
)
with subprocess.Popen(["npm", "start"], cwd=wd, stdout=subprocess.PIPE):
print("Running frontend on http://localhost:1234")
@staticmethod
def run_cli():
return run_parser(get_parser(DevCli.get_cli_spec()))
def get_parser(cli_spec) -> argparse.ArgumentParser:
cli_spec["formatter_class"] = argparse.ArgumentDefaultsHelpFormatter
return cli(cli_spec)
def run_parser(parser):
    """Parse CLI args and dispatch to the selected subcommand handler.

    Prints usage help when no subcommand (and thus no `func` default) was
    selected.
    """
    args = parser.parse_args()
    # BUG FIX: the call used to sit inside `except AttributeError`, which
    # also hid AttributeErrors raised *inside* the command handler itself.
    func = getattr(args, "func", None)
    if func is None:
        print(f"Error parsing args `{vars(args) or 'None'}`")
        parser.print_help()
    else:
        func(**vars(args))
import statistics, collections
POS_KEY = "POS"
UNIV_FEATURES = [
"PronType", "NumType", "Poss", "Reflex", "Foreign", "Abbr", "Gender",
"Animacy", "Number", "Case", "Definite", "Degree", "VerbForm", "Mood",
"Tense", "Aspect", "Voice", "Evident", "Polarity", "Person", "Polite"
]
def f1(corr, gold, obs):
    """Balanced F-measure from correct/gold/observed counts (0 when undefined)."""
    if corr <= 0 or gold <= 0 or obs <= 0:
        return 0
    recall = corr / gold
    precision = corr / obs
    return (2 * recall * precision) / (recall + precision)
class Evaluator(object):
    '''
    Aggregates and evaluates attribute scores.
    :param mode: one of 'by_feats', 'by_values', 'exact' - 'by_feats' pools scores by attribute over values, 'by_values' uses separate scores for each <attribute, value> pair, 'exact' pools scores by each distinct string of all concatenated attribute.value pairs
    :param only_univ: only uses the features evaluated in CoNLL18, i.e. those listed in UNIV_FEATURES
    '''
    def __init__(self, mode="by_feats", only_univ=False):
        self.instance_count = 0
        self.mode = mode
        self.only_univ = only_univ
        # defaultdict(int) lets the counting code increment keys blindly.
        self.correct = collections.defaultdict(int)
        self.gold = collections.defaultdict(int)
        self.observed = collections.defaultdict(int)
    def keys(self):
        """All keys seen in either the gold or the observed counts."""
        return self.gold.keys() | self.observed.keys()
    def _in_scope(self, k):
        """True when attribute k should be scored under the only_univ setting."""
        return not self.only_univ or k == POS_KEY or k in UNIV_FEATURES
    def _filtered_keys(self, excl):
        """All keys minus excluded attributes (unwraps (att, value) keys in by_values mode)."""
        keys = self.gold.keys() | self.observed.keys()
        if excl is None:
            return list(keys)
        if self.mode == "by_values":
            return [k for k in keys if k[0] not in excl]
        return [k for k in keys if k not in excl]
    def add_instance(self, g, o):
        '''
        Record one annotated instance.
        :param g: - gold annotation for instance (key-value dict)
        :param o: - observed (inferred) annotation for instance (key-value dict)
        '''
        self.instance_count += 1
        if self.mode == "exact":
            # Pool by the full "k=v|k=v|..." signature of each annotation.
            gkey = "|".join("=".join(x) for x in sorted(g.items()) if self._in_scope(x[0]))
            okey = "|".join("=".join(x) for x in sorted(o.items()) if self._in_scope(x[0]))
            self.gold[gkey] += 1
            self.observed[okey] += 1
            if gkey == okey:
                self.correct[gkey] += 1
        else:
            for k, v in g.items():
                if not self._in_scope(k):
                    continue
                key = (k, v) if self.mode == "by_values" else k
                if k in o and o[k] == v:
                    self.correct[key] += 1
                self.gold[key] += 1
            for k, v in o.items():
                if not self._in_scope(k):
                    continue
                key = (k, v) if self.mode == "by_values" else k
                self.observed[key] += 1
    def micro_f1(self, att=None, excl=()):
        '''
        Micro F1 (F1 over pooled counts).
        :param att: get f1 for specific attribute (exact match)
        :param excl: get f1 for all attributes except those listed
        '''
        if att is not None:
            return f1(self.correct[att], self.gold[att], self.observed[att])
        # Immutable () default replaces the old mutable [] default argument.
        keys = set(self._filtered_keys(excl))
        return f1(
            sum(self.correct[k] for k in self.correct if k in keys),
            sum(self.gold[k] for k in self.gold if k in keys),
            sum(self.observed[k] for k in self.observed if k in keys),
        )
    def macro_f1(self, excl=()):
        '''
        Macro F1 (unweighted mean of per-key F1 scores).
        :param excl: get f1 for all attributes except those listed
        '''
        return statistics.mean(f1(self.correct[k], self.gold[k], self.observed[k])
                               for k in self._filtered_keys(excl))
    def acc(self, att=None):
        '''
        Accuracy (correct / gold); 0.0 when there is nothing to score.
        '''
        if self.instance_count <= 0:
            return 0.0
        if att is not None:
            if self.mode == "by_values":
                # Pool all (att, value) pairs belonging to this attribute.
                corr = sum(self.correct[k] for k in self.correct if k[0] == att)
                gold = sum(self.gold[k] for k in self.gold if k[0] == att)
            else:
                corr, gold = self.correct[att], self.gold[att]
        else:
            corr = sum(self.correct.values())
            gold = sum(self.gold.values())
        # Guard all branches against division by zero: the old by_values
        # branch raised ZeroDivisionError for attributes never seen in gold.
        return corr / gold if gold else 0.0
    def f1(self, corr, gold, obs):
        """Instance-level F1 helper (same formula as the module-level f1)."""
        if gold <= 0 or obs <= 0 or corr <= 0:
            return 0
        r = corr / gold
        p = corr / obs
        return (2 * r * p) / (r + p)
import statistics, collections
# Key under which the part-of-speech tag is stored in annotation dicts.
POS_KEY = "POS"
# Universal morphological features scored in the CoNLL18 shared task
# (see Evaluator's only_univ flag).
UNIV_FEATURES = [
    "PronType", "NumType", "Poss", "Reflex", "Foreign", "Abbr", "Gender",
    "Animacy", "Number", "Case", "Definite", "Degree", "VerbForm", "Mood",
    "Tense", "Aspect", "Voice", "Evident", "Polarity", "Person", "Polite"
]
def f1(corr, gold, obs):
    """Balanced F-measure from correct/gold/observed counts (0 when undefined)."""
    if corr <= 0 or gold <= 0 or obs <= 0:
        return 0
    recall = corr / gold
    precision = corr / obs
    return (2 * recall * precision) / (recall + precision)
class Evaluator(object):
    '''
    Aggregates and evaluates attribute scores.
    :param mode: one of 'by_feats', 'by_values', 'exact' - 'by_feats' pools scores by attribute over values, 'by_values' uses separate scores for each <attribute, value> pair, 'exact' pools scores by each distinct string of all concatenated attribute.value pairs
    :param only_univ: only uses the features evaluated in CoNLL18, i.e. those listed in UNIV_FEATURES
    '''
    def __init__(self, mode="by_feats", only_univ=False):
        self.instance_count = 0
        self.mode = mode
        self.only_univ = only_univ
        # defaultdict(int) lets the counting code increment keys blindly.
        self.correct = collections.defaultdict(int)
        self.gold = collections.defaultdict(int)
        self.observed = collections.defaultdict(int)
    def keys(self):
        """All keys seen in either the gold or the observed counts."""
        return self.gold.keys() | self.observed.keys()
    def _in_scope(self, k):
        """True when attribute k should be scored under the only_univ setting."""
        return not self.only_univ or k == POS_KEY or k in UNIV_FEATURES
    def _filtered_keys(self, excl):
        """All keys minus excluded attributes (unwraps (att, value) keys in by_values mode)."""
        keys = self.gold.keys() | self.observed.keys()
        if excl is None:
            return list(keys)
        if self.mode == "by_values":
            return [k for k in keys if k[0] not in excl]
        return [k for k in keys if k not in excl]
    def add_instance(self, g, o):
        '''
        Record one annotated instance.
        :param g: - gold annotation for instance (key-value dict)
        :param o: - observed (inferred) annotation for instance (key-value dict)
        '''
        self.instance_count += 1
        if self.mode == "exact":
            # Pool by the full "k=v|k=v|..." signature of each annotation.
            gkey = "|".join("=".join(x) for x in sorted(g.items()) if self._in_scope(x[0]))
            okey = "|".join("=".join(x) for x in sorted(o.items()) if self._in_scope(x[0]))
            self.gold[gkey] += 1
            self.observed[okey] += 1
            if gkey == okey:
                self.correct[gkey] += 1
        else:
            for k, v in g.items():
                if not self._in_scope(k):
                    continue
                key = (k, v) if self.mode == "by_values" else k
                if k in o and o[k] == v:
                    self.correct[key] += 1
                self.gold[key] += 1
            for k, v in o.items():
                if not self._in_scope(k):
                    continue
                key = (k, v) if self.mode == "by_values" else k
                self.observed[key] += 1
    def micro_f1(self, att=None, excl=()):
        '''
        Micro F1 (F1 over pooled counts).
        :param att: get f1 for specific attribute (exact match)
        :param excl: get f1 for all attributes except those listed
        '''
        if att is not None:
            return f1(self.correct[att], self.gold[att], self.observed[att])
        # Immutable () default replaces the old mutable [] default argument.
        keys = set(self._filtered_keys(excl))
        return f1(
            sum(self.correct[k] for k in self.correct if k in keys),
            sum(self.gold[k] for k in self.gold if k in keys),
            sum(self.observed[k] for k in self.observed if k in keys),
        )
    def macro_f1(self, excl=()):
        '''
        Macro F1 (unweighted mean of per-key F1 scores).
        :param excl: get f1 for all attributes except those listed
        '''
        return statistics.mean(f1(self.correct[k], self.gold[k], self.observed[k])
                               for k in self._filtered_keys(excl))
    def acc(self, att=None):
        '''
        Accuracy (correct / gold); 0.0 when there is nothing to score.
        '''
        if self.instance_count <= 0:
            return 0.0
        if att is not None:
            if self.mode == "by_values":
                # Pool all (att, value) pairs belonging to this attribute.
                corr = sum(self.correct[k] for k in self.correct if k[0] == att)
                gold = sum(self.gold[k] for k in self.gold if k[0] == att)
            else:
                corr, gold = self.correct[att], self.gold[att]
        else:
            corr = sum(self.correct.values())
            gold = sum(self.gold.values())
        # Guard all branches against division by zero: the old by_values
        # branch raised ZeroDivisionError for attributes never seen in gold.
        return corr / gold if gold else 0.0
    def f1(self, corr, gold, obs):
        """Instance-level F1 helper (same formula as the module-level f1)."""
        if gold <= 0 or obs <= 0 or corr <= 0:
            return 0
        r = corr / gold
        p = corr / obs
        return (2 * r * p) / (r + p)
import logging
import shutil
from copy import deepcopy
from typing import Dict, Any
import h5py
from Bio import SeqIO
from pandas import read_csv, DataFrame
from tqdm import tqdm
from bio_embeddings.embed import (
ProtTransAlbertBFDEmbedder,
ProtTransBertBFDEmbedder,
EmbedderInterface,
SeqVecEmbedder,
ProtTransXLNetUniRef100Embedder,
UniRepEmbedder,
ESMEmbedder,
CPCProtEmbedder,
)
from bio_embeddings.utilities import (
InvalidParameterError,
get_model_file,
check_required,
get_file_manager,
get_model_directories_from_zip,
FileManagerInterface,
)
from bio_embeddings.utilities.backports import nullcontext
logger = logging.getLogger(__name__)
def _print_expected_file_sizes(
    embedder: EmbedderInterface, mapping_file: DataFrame, result_kwargs: Dict[str, Any]
) -> None:
    """
    Logs the lower bound size of embeddings_file and reduced_embedding_file,
    and warns when the output filesystem lacks that much free space.
    :param embedder: the embedder being used
    :param mapping_file: the mapping file of the sequences
    :param result_kwargs: the kwargs passed to the pipeline --> will decide what to print
    :return: Nothing.
    """
    # 4 bytes per float32; per-residue embeddings keep every layer, the
    # reduced (per-protein) embedding is a single vector.
    per_amino_acid_size_in_bytes = 4 * embedder.embedding_dimension * embedder.number_of_layers
    per_protein_size_in_bytes = 4 * embedder.embedding_dimension
    total_number_of_proteins = len(mapping_file)
    total_aa = mapping_file['sequence_length'].sum()
    embeddings_file_size_in_MB = per_amino_acid_size_in_bytes * total_aa * pow(10, -6)
    reduced_embeddings_file_size_in_MB = per_protein_size_in_bytes * total_number_of_proteins * pow(10, -6)
    required_space_in_MB = 0
    if result_kwargs.get("reduce") is True:
        logger.info(f"The minimum expected size for the reduced_embedding_file is "
                    f"{reduced_embeddings_file_size_in_MB:.3f}MB.")
        required_space_in_MB += reduced_embeddings_file_size_in_MB
    # Per-amino-acid embeddings are written unless BOTH reduce and
    # discard_per_amino_acid_embeddings are enabled.
    if not (result_kwargs.get("reduce") is True and result_kwargs.get("discard_per_amino_acid_embeddings") is True):
        logger.info(f"The minimum expected size for the embedding_file is {embeddings_file_size_in_MB:.3f}MB.")
        required_space_in_MB += embeddings_file_size_in_MB
    # Free space on the filesystem that holds the output prefix.
    _, _, available_space_in_bytes = shutil.disk_usage(result_kwargs.get('prefix'))
    available_space_in_MB = available_space_in_bytes * pow(10, -6)
    if available_space_in_MB < required_space_in_MB:
        logger.warning(f"You are attempting to generate {required_space_in_MB:.3f}MB worth of embeddings, "
                       f"but only {available_space_in_MB:.3f}MB are available at "
                       f"the prefix({result_kwargs.get('prefix')}). \n"
                       f"We suggest you stop execution NOW and double check you have enough free space available. "
                       f"Alternatively, try reducing the input FASTA file.")
    else:
        logger.info(f"You are going to generate a total of {required_space_in_MB:.3f}MB of embeddings, and have "
                    f"{available_space_in_MB:.3f}MB available at {result_kwargs.get('prefix')}.")
def _get_reduced_embeddings_file_context(
    file_manager: FileManagerInterface, result_kwargs: Dict[str, Any]
):
    """
    Open a writable h5 file for reduced (per-protein) embeddings when
    reduction is enabled; otherwise return a no-op context.
    :param file_manager: The FileManager derived class which will be used to create the file
    :param result_kwargs: A dictionary which will be updated in-place to include the path to the newly created file
    :return: a file context
    """
    result_kwargs.setdefault("reduce", False)
    if result_kwargs["reduce"] is not True:
        return nullcontext()
    path = file_manager.create_file(
        result_kwargs.get("prefix"),
        result_kwargs.get("stage_name"),
        "reduced_embeddings_file",
        extension=".h5",
    )
    result_kwargs["reduced_embeddings_file"] = path
    return h5py.File(path, "w")
def _get_embeddings_file_context(
    file_manager: FileManagerInterface, result_kwargs: Dict[str, Any]
):
    """
    Open a writable h5 file for per-amino-acid embeddings, unless the
    pipeline was told to discard them (then a no-op context is returned).
    :param file_manager: The FileManager derived class which will be used to create the file
    :param result_kwargs: A dictionary which will be updated in-place to include the path to the newly created file
    :return: a file context
    """
    result_kwargs.setdefault("discard_per_amino_acid_embeddings", False)
    if result_kwargs["discard_per_amino_acid_embeddings"] is True:
        # Use .get(): "reduce" may not have been defaulted yet (that happens
        # later, in _get_reduced_embeddings_file_context), and a missing key
        # must behave like False here instead of raising KeyError.
        if result_kwargs.get("reduce", False) is False:
            raise InvalidParameterError(
                "Cannot have discard_per_amino_acid_embeddings=True and reduce=False. Both must be True."
            )
        return nullcontext()
    else:
        embeddings_file_path = file_manager.create_file(
            result_kwargs.get("prefix"),
            result_kwargs.get("stage_name"),
            "embeddings_file",
            extension=".h5",
        )
        result_kwargs["embeddings_file"] = embeddings_file_path
        return h5py.File(embeddings_file_path, "w")
def embed_and_write_batched(
    embedder: EmbedderInterface,
    file_manager: FileManagerInterface,
    result_kwargs: Dict[str, Any],
) -> Dict[str, Any]:
    """ The shared code between the SeqVec, Albert, Bert and XLNet pipelines.

    Streams sequences from the remapped FASTA through the embedder and writes
    per-residue and/or reduced embeddings to h5, keyed by remapped sequence id.
    Returns result_kwargs augmented with the created file paths.
    """
    # Lazy fasta file reader. The mapping file contains the corresponding ids in the same order
    sequences = (
        str(entry.seq)
        for entry in SeqIO.parse(result_kwargs["remapped_sequences_file"], "fasta")
    )
    # We want to read the unnamed column 0 as str (esp. with simple_remapping), which requires some workarounds
    # https://stackoverflow.com/a/29793294/3549270
    mapping_file = read_csv(result_kwargs["mapping_file"], index_col=0)
    mapping_file.index = mapping_file.index.astype('str')
    # Print the minimum required file sizes
    _print_expected_file_sizes(embedder, mapping_file, result_kwargs)
    # Open embedding files or null contexts and iteratively save embeddings to file
    with _get_embeddings_file_context(
        file_manager, result_kwargs
    ) as embeddings_file, _get_reduced_embeddings_file_context(
        file_manager, result_kwargs
    ) as reduced_embeddings_file:
        embedding_generator = embedder.embed_many(
            sequences, result_kwargs.get("max_amino_acids")
        )
        # zip relies on the mapping rows being in the same order as the FASTA
        # records; tqdm wraps the generator only for progress reporting.
        for sequence_id, original_id, embedding in zip(
            mapping_file.index,
            mapping_file["original_id"],
            tqdm(embedding_generator, total=len(mapping_file))
        ):
            if result_kwargs.get("discard_per_amino_acid_embeddings") is False:
                dataset = embeddings_file.create_dataset(sequence_id, data=embedding)
                dataset.attrs["original_id"] = original_id
            if result_kwargs.get("reduce") is True:
                dataset = reduced_embeddings_file.create_dataset(
                    sequence_id, data=embedder.reduce_per_protein(embedding)
                )
                dataset.attrs["original_id"] = original_id
    return result_kwargs
# Maps pipeline protocol names to embedder implementations.
PROTOCOLS = {
    "seqvec": SeqVecEmbedder,
    "prottrans_albert_bfd": ProtTransAlbertBFDEmbedder,
    "prottrans_bert_bfd": ProtTransBertBFDEmbedder,
    "prottrans_xlnet_uniref100": ProtTransXLNetUniRef100Embedder,
    "unirep": UniRepEmbedder,
    "esm": ESMEmbedder,
    "cpcprot": CPCProtEmbedder
}
# Default per-batch residue budgets (max_amino_acids) per protocol.
# TODO: 10000 is a random guess
# The remainder was measured for a GTX 1080 with 8GB memory
DEFAULT_MAX_AMINO_ACIDS = {
    "seqvec": 15000,
    "prottrans_albert_bfd": 3035,
    "prottrans_bert_bfd": 6024,
    "prottrans_xlnet_uniref100": 4000,
    "unirep": 10000,
    "esm": 10000,
    "cpcprot": 10000,
}
def run(**kwargs):
    """
    Run embedding protocol
    Parameters
    ----------
    kwargs arguments (* denotes optional):
        sequences_file: Where sequences live
        prefix: Output prefix for all generated files
        protocol: Which embedder to use
        mapping_file: the mapping file generated by the pipeline when remapping indexes
        stage_name: The stage name
    Returns
    -------
    Dictionary with results of stage
    """
    check_required(
        kwargs,
        ["protocol", "prefix", "stage_name", "remapped_sequences_file", "mapping_file"],
    )
    if kwargs["protocol"] not in PROTOCOLS:
        raise InvalidParameterError(
            "Invalid protocol selection: {}. Valid protocols are: {}".format(
                kwargs["protocol"], ", ".join(PROTOCOLS.keys())
            )
        )
    embedder_class = PROTOCOLS[kwargs["protocol"]]
    if embedder_class == UniRepEmbedder and kwargs.get("use_cpu") is not None:
        raise InvalidParameterError("UniRep does not support configuring `use_cpu`")
    # Work on a copy so the caller's kwargs are not polluted with model paths.
    result_kwargs = deepcopy(kwargs)
    # Download necessary files if needed
    # noinspection PyProtectedMember
    for file in embedder_class._necessary_files:
        if not result_kwargs.get(file):
            result_kwargs[file] = get_model_file(model=embedder_class.name, file=file)
    # noinspection PyProtectedMember
    for directory in embedder_class._necessary_directories:
        if not result_kwargs.get(directory):
            result_kwargs[directory] = get_model_directories_from_zip(
                model=embedder_class.name, directory=directory
            )
    result_kwargs.setdefault("max_amino_acids", DEFAULT_MAX_AMINO_ACIDS[kwargs["protocol"]])
    file_manager = get_file_manager(**kwargs)
    embedder: EmbedderInterface = embedder_class(**result_kwargs)
    # Fix: the original return line had a dataset-dump artifact
    # ("| bio_embeddings/embed/pipeline.py | import logging") fused onto it,
    # which is a syntax error; restored to the plain return.
    return embed_and_write_batched(embedder, file_manager, result_kwargs)
import shutil
from copy import deepcopy
from typing import Dict, Any
import h5py
from Bio import SeqIO
from pandas import read_csv, DataFrame
from tqdm import tqdm
from bio_embeddings.embed import (
ProtTransAlbertBFDEmbedder,
ProtTransBertBFDEmbedder,
EmbedderInterface,
SeqVecEmbedder,
ProtTransXLNetUniRef100Embedder,
UniRepEmbedder,
ESMEmbedder,
CPCProtEmbedder,
)
from bio_embeddings.utilities import (
InvalidParameterError,
get_model_file,
check_required,
get_file_manager,
get_model_directories_from_zip,
FileManagerInterface,
)
from bio_embeddings.utilities.backports import nullcontext
logger = logging.getLogger(__name__)
def _print_expected_file_sizes(
    embedder: EmbedderInterface, mapping_file: DataFrame, result_kwargs: Dict[str, Any]
) -> None:
    """
    Logs the lower bound size of embeddings_file and reduced_embedding_file,
    and warns when the output filesystem lacks that much free space.
    :param embedder: the embedder being used
    :param mapping_file: the mapping file of the sequences
    :param result_kwargs: the kwargs passed to the pipeline --> will decide what to print
    :return: Nothing.
    """
    # 4 bytes per float32; per-residue embeddings keep every layer, the
    # reduced (per-protein) embedding is a single vector.
    per_amino_acid_size_in_bytes = 4 * embedder.embedding_dimension * embedder.number_of_layers
    per_protein_size_in_bytes = 4 * embedder.embedding_dimension
    total_number_of_proteins = len(mapping_file)
    total_aa = mapping_file['sequence_length'].sum()
    embeddings_file_size_in_MB = per_amino_acid_size_in_bytes * total_aa * pow(10, -6)
    reduced_embeddings_file_size_in_MB = per_protein_size_in_bytes * total_number_of_proteins * pow(10, -6)
    required_space_in_MB = 0
    if result_kwargs.get("reduce") is True:
        logger.info(f"The minimum expected size for the reduced_embedding_file is "
                    f"{reduced_embeddings_file_size_in_MB:.3f}MB.")
        required_space_in_MB += reduced_embeddings_file_size_in_MB
    # Per-amino-acid embeddings are written unless BOTH reduce and
    # discard_per_amino_acid_embeddings are enabled.
    if not (result_kwargs.get("reduce") is True and result_kwargs.get("discard_per_amino_acid_embeddings") is True):
        logger.info(f"The minimum expected size for the embedding_file is {embeddings_file_size_in_MB:.3f}MB.")
        required_space_in_MB += embeddings_file_size_in_MB
    # Free space on the filesystem that holds the output prefix.
    _, _, available_space_in_bytes = shutil.disk_usage(result_kwargs.get('prefix'))
    available_space_in_MB = available_space_in_bytes * pow(10, -6)
    if available_space_in_MB < required_space_in_MB:
        logger.warning(f"You are attempting to generate {required_space_in_MB:.3f}MB worth of embeddings, "
                       f"but only {available_space_in_MB:.3f}MB are available at "
                       f"the prefix({result_kwargs.get('prefix')}). \n"
                       f"We suggest you stop execution NOW and double check you have enough free space available. "
                       f"Alternatively, try reducing the input FASTA file.")
    else:
        logger.info(f"You are going to generate a total of {required_space_in_MB:.3f}MB of embeddings, and have "
                    f"{available_space_in_MB:.3f}MB available at {result_kwargs.get('prefix')}.")
def _get_reduced_embeddings_file_context(
    file_manager: FileManagerInterface, result_kwargs: Dict[str, Any]
):
    """
    Open a writable h5 file for reduced (per-protein) embeddings when
    reduction is enabled; otherwise return a no-op context.
    :param file_manager: The FileManager derived class which will be used to create the file
    :param result_kwargs: A dictionary which will be updated in-place to include the path to the newly created file
    :return: a file context
    """
    result_kwargs.setdefault("reduce", False)
    if result_kwargs["reduce"] is not True:
        return nullcontext()
    path = file_manager.create_file(
        result_kwargs.get("prefix"),
        result_kwargs.get("stage_name"),
        "reduced_embeddings_file",
        extension=".h5",
    )
    result_kwargs["reduced_embeddings_file"] = path
    return h5py.File(path, "w")
def _get_embeddings_file_context(
    file_manager: FileManagerInterface, result_kwargs: Dict[str, Any]
):
    """
    Open a writable h5 file for per-amino-acid embeddings, unless the
    pipeline was told to discard them (then a no-op context is returned).
    :param file_manager: The FileManager derived class which will be used to create the file
    :param result_kwargs: A dictionary which will be updated in-place to include the path to the newly created file
    :return: a file context
    """
    result_kwargs.setdefault("discard_per_amino_acid_embeddings", False)
    if result_kwargs["discard_per_amino_acid_embeddings"] is True:
        # Use .get(): "reduce" may not have been defaulted yet (that happens
        # later, in _get_reduced_embeddings_file_context), and a missing key
        # must behave like False here instead of raising KeyError.
        if result_kwargs.get("reduce", False) is False:
            raise InvalidParameterError(
                "Cannot have discard_per_amino_acid_embeddings=True and reduce=False. Both must be True."
            )
        return nullcontext()
    else:
        embeddings_file_path = file_manager.create_file(
            result_kwargs.get("prefix"),
            result_kwargs.get("stage_name"),
            "embeddings_file",
            extension=".h5",
        )
        result_kwargs["embeddings_file"] = embeddings_file_path
        return h5py.File(embeddings_file_path, "w")
def embed_and_write_batched(
    embedder: EmbedderInterface,
    file_manager: FileManagerInterface,
    result_kwargs: Dict[str, Any],
) -> Dict[str, Any]:
    """ The shared code between the SeqVec, Albert, Bert and XLNet pipelines.

    Streams sequences from the remapped FASTA through the embedder and writes
    per-residue and/or reduced embeddings to h5, keyed by remapped sequence id.
    Returns result_kwargs augmented with the created file paths.
    """
    # Lazy fasta file reader. The mapping file contains the corresponding ids in the same order
    sequences = (
        str(entry.seq)
        for entry in SeqIO.parse(result_kwargs["remapped_sequences_file"], "fasta")
    )
    # We want to read the unnamed column 0 as str (esp. with simple_remapping), which requires some workarounds
    # https://stackoverflow.com/a/29793294/3549270
    mapping_file = read_csv(result_kwargs["mapping_file"], index_col=0)
    mapping_file.index = mapping_file.index.astype('str')
    # Print the minimum required file sizes
    _print_expected_file_sizes(embedder, mapping_file, result_kwargs)
    # Open embedding files or null contexts and iteratively save embeddings to file
    with _get_embeddings_file_context(
        file_manager, result_kwargs
    ) as embeddings_file, _get_reduced_embeddings_file_context(
        file_manager, result_kwargs
    ) as reduced_embeddings_file:
        embedding_generator = embedder.embed_many(
            sequences, result_kwargs.get("max_amino_acids")
        )
        # zip relies on the mapping rows being in the same order as the FASTA
        # records; tqdm wraps the generator only for progress reporting.
        for sequence_id, original_id, embedding in zip(
            mapping_file.index,
            mapping_file["original_id"],
            tqdm(embedding_generator, total=len(mapping_file))
        ):
            if result_kwargs.get("discard_per_amino_acid_embeddings") is False:
                dataset = embeddings_file.create_dataset(sequence_id, data=embedding)
                dataset.attrs["original_id"] = original_id
            if result_kwargs.get("reduce") is True:
                dataset = reduced_embeddings_file.create_dataset(
                    sequence_id, data=embedder.reduce_per_protein(embedding)
                )
                dataset.attrs["original_id"] = original_id
    return result_kwargs
# Maps pipeline protocol names to embedder implementations.
PROTOCOLS = {
    "seqvec": SeqVecEmbedder,
    "prottrans_albert_bfd": ProtTransAlbertBFDEmbedder,
    "prottrans_bert_bfd": ProtTransBertBFDEmbedder,
    "prottrans_xlnet_uniref100": ProtTransXLNetUniRef100Embedder,
    "unirep": UniRepEmbedder,
    "esm": ESMEmbedder,
    "cpcprot": CPCProtEmbedder
}
# Default per-batch residue budgets (max_amino_acids) per protocol.
# TODO: 10000 is a random guess
# The remainder was measured for a GTX 1080 with 8GB memory
DEFAULT_MAX_AMINO_ACIDS = {
    "seqvec": 15000,
    "prottrans_albert_bfd": 3035,
    "prottrans_bert_bfd": 6024,
    "prottrans_xlnet_uniref100": 4000,
    "unirep": 10000,
    "esm": 10000,
    "cpcprot": 10000,
}
def run(**kwargs):
    """
    Run embedding protocol
    Parameters
    ----------
    kwargs arguments (* denotes optional):
        sequences_file: Where sequences live
        prefix: Output prefix for all generated files
        protocol: Which embedder to use
        mapping_file: the mapping file generated by the pipeline when remapping indexes
        stage_name: The stage name
    Returns
    -------
    Dictionary with results of stage
    """
    check_required(
        kwargs,
        ["protocol", "prefix", "stage_name", "remapped_sequences_file", "mapping_file"],
    )
    if kwargs["protocol"] not in PROTOCOLS:
        raise InvalidParameterError(
            "Invalid protocol selection: {}. Valid protocols are: {}".format(
                kwargs["protocol"], ", ".join(PROTOCOLS.keys())
            )
        )
    embedder_class = PROTOCOLS[kwargs["protocol"]]
    if embedder_class == UniRepEmbedder and kwargs.get("use_cpu") is not None:
        raise InvalidParameterError("UniRep does not support configuring `use_cpu`")
    # Work on a copy so the caller's kwargs are not polluted with model paths.
    result_kwargs = deepcopy(kwargs)
    # Download necessary files if needed
    # noinspection PyProtectedMember
    for file in embedder_class._necessary_files:
        if not result_kwargs.get(file):
            result_kwargs[file] = get_model_file(model=embedder_class.name, file=file)
    # noinspection PyProtectedMember
    for directory in embedder_class._necessary_directories:
        if not result_kwargs.get(directory):
            result_kwargs[directory] = get_model_directories_from_zip(
                model=embedder_class.name, directory=directory
            )
    result_kwargs.setdefault("max_amino_acids", DEFAULT_MAX_AMINO_ACIDS[kwargs["protocol"]])
    file_manager = get_file_manager(**kwargs)
    embedder: EmbedderInterface = embedder_class(**result_kwargs)
    return embed_and_write_batched(embedder, file_manager, result_kwargs)
import os, sys, time
import kcore.webserver_circpy as W
import kcore.common as C
import kcore.html as H
import kcore.gpio as G
import kcore.neo as N
import kcore.varz as V
# circuitpy_sim
import board
CIRCUITPYTHON = 'boot_out.txt' in os.listdir('/')
# ---------- handlers
# Route table: URL path (plain string or regex) -> handler(request) callable
# returning the response body. Handlers read shared objects from
# request.context (set up in create_ws).
WEB_HANDLERS = {
    '/context': lambda request: request.context.get('c'),
    '/get': lambda request: request.get_params.get('g'),
    '/hi': lambda request: 'hello world',
    '/hi2': lambda request: H.wrap('hello world', 'p'),
    '/kb1': lambda request: str(request.context.get('kb1').value()),
    '/logfun': lambda request: logfun(request),
    r'/match/(\w+)': lambda request: request.route_match_groups[0],
    '/neoflash': lambda request: neoflash(request),
    '/ra': lambda request: str(request.remote_address),
    '/vset': lambda request: vset(request),
}
def logfun(request):
    """Clear the kcore log and write one entry; exercises the logging path."""
    C.clear_log() # in-case it's gotten too long, and just to make sure it works.
    C.log('logfun')
    return 'ok'
def neoflash(request):
    """Flash neopixel 0 red, green, purple (0.2s each), then turn it off."""
    neo = request.context.get('neo')
    for color in (N.RED, N.GREEN, N.PURPLE):
        neo[0] = color
        time.sleep(0.2)
    neo[0] = N.OFF
    return 'ok'
def vset(request):
    """Store every GET parameter into varz; return how many were set."""
    params = request.get_params
    for key, value in params.items():
        V.set(key, value)
    return str(len(params))
# ---------- main
def create_ws(port):
    """Construct the test WebServer with a GPIO button and neopixel in context."""
    G.init()
    # Run the button poller in the background when not on real hardware.
    kb1 = G.KButton(board.D0, name='D0', background=not CIRCUITPYTHON)
    neo = N.Neo(n=1, pin=board.NEOPIXEL)
    ctx = {'c': 'hello', 'kb1': kb1, 'neo': neo}
    ws = W.WebServer(WEB_HANDLERS, wrap_handlers=False, port=port, blocking=True, context=ctx)
    return ws
# This part only runsif this file is main.py on real CircuitPy hardware.
# (when running locally, the test calls create_ws() direclty.
def main():
    """Hardware entry point: join wifi (best effort), then serve forever."""
    try:
        import wifi_secrets as S
        print(f'{time.time()}: connecting to wifi...')
        W.connect_wifi(S.DHCP_HOSTNAME, S.SSID, S.WIFI_PASSWORD)
    except Exception as e:
        # Best-effort: the server is still usable without wifi (e.g. local sim).
        print('Unable to connect to wifi; skipping: ' + str(e), file=sys.stderr)
    ws = create_ws(port=8080)
    print(f'{time.time()}: starting web server')
    while True:
        status = ws.listen()
        print(f'{time.time()}: main loop; status={status}')
        time.sleep(0.3) # Don't loop too fast...
# Run the server when executed directly. Fix: the original main() line had a
# dataset-dump artifact ("| pylib/tests/kcore/server.py | import os, sys, time")
# fused onto it, which is a syntax error.
if __name__ == '__main__':
    main()
import kcore.webserver_circpy as W
import kcore.common as C
import kcore.html as H
import kcore.gpio as G
import kcore.neo as N
import kcore.varz as V
# circuitpy_sim
import board
CIRCUITPYTHON = 'boot_out.txt' in os.listdir('/')
# ---------- handlers
# Route table: URL path (plain string or regex) -> handler(request) callable
# returning the response body. Handlers read shared objects from
# request.context (set up in create_ws).
WEB_HANDLERS = {
    '/context': lambda request: request.context.get('c'),
    '/get': lambda request: request.get_params.get('g'),
    '/hi': lambda request: 'hello world',
    '/hi2': lambda request: H.wrap('hello world', 'p'),
    '/kb1': lambda request: str(request.context.get('kb1').value()),
    '/logfun': lambda request: logfun(request),
    r'/match/(\w+)': lambda request: request.route_match_groups[0],
    '/neoflash': lambda request: neoflash(request),
    '/ra': lambda request: str(request.remote_address),
    '/vset': lambda request: vset(request),
}
def logfun(request):
    """Clear the kcore log and write one entry; exercises the logging path."""
    C.clear_log() # in-case it's gotten too long, and just to make sure it works.
    C.log('logfun')
    return 'ok'
def neoflash(request):
    """Flash neopixel 0 red, green, purple (0.2s each), then turn it off."""
    neo = request.context.get('neo')
    for color in (N.RED, N.GREEN, N.PURPLE):
        neo[0] = color
        time.sleep(0.2)
    neo[0] = N.OFF
    return 'ok'
def vset(request):
    """Store every GET parameter into varz; return how many were set."""
    params = request.get_params
    for key, value in params.items():
        V.set(key, value)
    return str(len(params))
# ---------- main
def create_ws(port):
    """Construct the test WebServer with a GPIO button and neopixel in context."""
    G.init()
    # Run the button poller in the background when not on real hardware.
    kb1 = G.KButton(board.D0, name='D0', background=not CIRCUITPYTHON)
    neo = N.Neo(n=1, pin=board.NEOPIXEL)
    ctx = {'c': 'hello', 'kb1': kb1, 'neo': neo}
    ws = W.WebServer(WEB_HANDLERS, wrap_handlers=False, port=port, blocking=True, context=ctx)
    return ws
# This part only runsif this file is main.py on real CircuitPy hardware.
# (when running locally, the test calls create_ws() direclty.
def main():
    """Hardware entry point: join wifi (best effort), then serve forever."""
    try:
        import wifi_secrets as S
        print(f'{time.time()}: connecting to wifi...')
        W.connect_wifi(S.DHCP_HOSTNAME, S.SSID, S.WIFI_PASSWORD)
    except Exception as e:
        # Best-effort: the server is still usable without wifi (e.g. local sim).
        print('Unable to connect to wifi; skipping: ' + str(e), file=sys.stderr)
    ws = create_ws(port=8080)
    print(f'{time.time()}: starting web server')
    while True:
        status = ws.listen()
        print(f'{time.time()}: main loop; status={status}')
        time.sleep(0.3) # Don't loop too fast...
# Run the server when executed directly (e.g. as main.py on hardware).
if __name__ == '__main__':
    main()
import numpy as np
import matplotlib.pyplot as plt
import accretion_code as ac
import file_tools as flt
from scipy.interpolate import interp1d
import dedalus.public as de
import file_tools as flt
def mag(x):
    """Safe log10 magnitude: floors |x| by adding 1e-16 to avoid log10(0)."""
    return np.log10(np.abs(x) + 1e-16)
import mpmath as mp
li2_obj = np.frompyfunc(lambda x: float(mp.polylog(2,x)),1,1)
li2 = lambda y: li2_obj(y).astype(float)
# stability diagrams
filename = 'regime-curves.h5'
curves = {}
for curve in flt.get_keys(filename):
curves[curve] = {'l':flt.load_data(filename,'l',group=curve)[0],
'g':flt.load_data(filename,'g',group=curve)[0]}
curve_splines = {curve: interp1d(curves[curve]['l'], curves[curve]['g']) for curve in curves}
fracbasis = de.Chebyshev('s',12,interval=(0,1))
fracs = fracbasis.grid()
c0 = curve_splines['equal-shock']
c1 = curve_splines['tangent-shock']
ls = np.linspace(0.2, 1.3, 20)
gs0 = c0(ls)
gs1 = c1(ls)
gs = gs0[:,None] + (gs1 - gs0)[:,None]*fracs[None,:]
# shock location and magnitude
dics = {}
ur0_rs = {}
for i in range(len(ls)):
for j in range(gs.shape[1]):
print(i,j)
li = ls[i]
gij = gs[i,j]
dics[i,j] = ac.stability(li,gij,out=False)
# growth rate calculation
i, j = 1,1
dic = dics[i, j]
λ1s = np.zeros(gs.shape)
λ2s = np.zeros(gs.shape)
avals = np.zeros(gs.shape)
for i in range(gs.shape[0]):
for j in range(gs.shape[1]):
l, g = ls[i], gs[i,j]
λ1s[i,j] = dics[i,j]['λ_s1']
λ2s[i,j] = dics[i,j]['λ_s2']
from scipy.interpolate import RectBivariateSpline
λ1_spline = RectBivariateSpline(ls, fracs, λ1s)
λ2_spline = RectBivariateSpline(ls, fracs, λ2s)
ls_high = np.linspace(.2,1.3,100)
fracs_high = np.linspace(.005,.995,100)
λ1s_high = λ1_spline(ls_high, fracs_high)
λ2s_high = λ2_spline(ls_high, fracs_high)
import matplotlib.colors as colors
frac = np.linspace(0,1,gs.shape[1],endpoint=False)
fig, ax = plt.subplots(1,2,gridspec_kw={'wspace':0},figsize=(6,2.5))
p1 = ax[0].pcolormesh(ls_high, fracs_high, λ1s_high.T,
norm=colors.SymLogNorm(linthresh=0.1, linscale=1.,
vmin=-2000, vmax=2000, base=10),
shading='nearest',cmap='RdBu_r')
ax[0].contour(ls_high, fracs_high, np.log10(np.abs(λ1s_high.T)),[-1,0,1,2,3],colors='k',linestyles='-')
p2 = ax[1].pcolormesh(ls_high, fracs_high, λ2s_high.T,
norm=colors.SymLogNorm(linthresh=0.1, linscale=1.,
vmin=-2000, vmax=2000, base=10),
shading='nearest',cmap='RdBu_r')
ax[1].contour(ls_high, fracs_high, np.log10(np.abs(λ2s_high.T)),[-1,0,1,2,3],colors='k',linestyles='-')
ax[0].set(xlabel='$\ell$',title='Inner shock')
ax[0].set_ylabel('$\\frac{r_h - r_{h,1}(\ell)}{r_{h,2}(\ell) - r_{h,1}(\ell)}$',fontsize=15)
ax[1].set(xlabel='$\ell$',yticks=[],title='Outer shock')
fig.suptitle('Asymptotic growth/decay rate $\lambda(\ell, r_h)$',y=1.08)
plt.colorbar(p2,ax=ax)
plt.savefig('figures/black-hole-shock-stability-regimes.png',bbox_inches='tight',dpi=500)
# finite eps regimes
def discriminant(l, g):
    """Polynomial discriminant in (l, g) used to classify the parameter regime."""
    # Terms listed in the original left-to-right order so the floating-point
    # summation order (and hence the result bits) is unchanged.
    terms = (
        32 * l**6 * g**3,
        -32 * l**8 * g**3,
        -432 * l**4 * g**4,
        560 * l**6 * g**4,
        -1440 * l**4 * g**5,
        -96 * l**6 * g**5,
        -1184 * l**4 * g**6,
        -96 * l**4 * g**7,
        -16 * l**2 * g**8,
        -32 * l**2 * g**9,
    )
    return sum(terms)
def sonic_points(l, g):
    """Real parts of the first three quartic roots (ascending), dropping the last."""
    coeffs = [1, -2 * (1 + g), l**2 + g**2, -2 * l**2 * g, (l * g) ** 2]
    roots = np.roots(coeffs).astype(complex)
    # np.roots returns roots in descending order; drop the last, then reverse.
    return roots.real[:-1][::-1]
def sonic_energy(l, g, rs):
    """Solve ac.f(rs, -1, e, l, g) = 0 for the energy e (Newton, seeded at 0).

    The fixed -1 argument is the u slot — presumably the sonic condition
    u = -1 at radius rs; confirm against accretion_code.
    """
    return ac.newton(lambda e: ac.f(rs,-1,e,l,g), lambda e:ac.fe(rs,-1,e,l,g), 0)
def log_min_u1_estimate(l, g, r0, e):
    """Closed-form estimate 0.5*(l/r0)^2 - 2/(r0-g) - ln(r0) - e."""
    rotational = 0.5 * (l / r0) ** 2
    potential = 2 / (r0 - g)
    return rotational - potential - np.log(r0) - e
def min_u1(l, g, r0, e1, u1):
    """Root of ac.f in u at radius r0 and energy e1, seeded at u1.

    The search is restricted to u in [-1, 0] (the |u| <= 1 branch).
    """
    return ac.newton(lambda u: ac.f(r0, u, e1, l, g),
                     lambda u: ac.fu(r0, u, e1, l, g),
                     u1,
                     bounds=[-1, -0],)
def max_u2(l, g, r0, e2):
return ac.newton(lambda u: ac.f(r0, u, e2, l, g),
lambda u: ac.fu(r0, u, e2, l, g),
-1.1,
bounds=[-np.inf, -1],)
def r_crit_u1(l, g, e1, r0, r2):
return ac.newton(lambda r: ac.f(r, -1, e1, l, g),
lambda r: ac.fr(r, -1, e1, l, g),
.5*(r0+r2),
bounds=[r0,r2])
def find_shock(l, g, e1, e2, r0, rcrit, out=False):
u10, u20 = -.9, -1.1
u1f = lambda r: ac.newton(lambda u: ac.f(r, u, e1, l, g),
lambda u: ac.fu(r, u, e1, l, g),
u10,
bounds=[-1, 0], out=' u1' if out else None, x_symb='u1')
u2f = lambda r: ac.newton(lambda u: ac.f(r, u, e2, l, g),
lambda u: ac.fu(r, u, e2, l, g),
u20,
bounds=[-np.inf, -1], out=' u2' if out else None, x_symb='u2')
u10 = u1f(rcrit*.99)
u20 = u2f(rcrit*.99)
def dr_gap(r):
nonlocal u10
nonlocal u20
u1, u2 = u10, u20 = u1f(r), u2f(r)
diff = u1 - 1/u2
dru1 = -ac.fr(r, u1, e1, l, g)/ac.fu(r, u1, e1, l, g)
dru2 = -ac.fr(r, u2, e2, l, g)/ac.fu(r, u2, e2, l, g)
grad = dru1 + dru2/u2**2
return grad
return ac.newton(lambda r: u1f(r) - 1/u2f(r),
dr_gap,
rcrit*(1-1e-5),
bounds=[r0, rcrit], out=out, x_symb='r', f_symb='Δu')
def u0_vec(r, e1, l, g, out=False):
u10 = -.99
us = np.zeros(r.shape)
def u1f(r):
nonlocal u10
return ac.newton(lambda u: ac.f(r, u, e1, l, g),
lambda u: ac.fu(r, u, e1, l, g),
u10,
bounds=[-1, 0], out=' u1' if out else None, x_symb='u1',
xatol=1e-14, fatol=1e-14, xrtol=1e-14)
for i, ri in enumerate(r):
us[i] = u10 = u1f(ri)
return us
def u1_r0(l, g, r1, r0, rs2, e1, e2, nr=128, out=False):
rbasis = de.Chebyshev('r',nr,interval=(r1, rs2))
domain = de.Domain([rbasis], grid_dtype=np.float64)
r, = domain.grids()
u0s = u0_vec(r, e1, l, g)
u0, l1, rf = domain.new_fields(3)
rf['g'] = r
u0['g'] = u0s
ρinf = np.exp(e2)
ρ0s = -1/(r*u0s)
l1['g'] = 2*l*(ρ0s - ρinf)
problem = de.LBVP(domain, variables=['u1'])
problem.parameters['l'] = l
problem.parameters['g'] = g
problem.parameters['l1'] = l1
problem.parameters['u0'] = u0
problem.parameters['e1'] = e1
problem.substitutions['res_u0'] = '(u0**2 + (l/r)**2)/2 - 2/(r-g) - log(-r*u0) - e1'
problem.substitutions['res_u1'] = 'dr((u0-1/u0)*u1)/2 - (dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3)'
# problem.substitutions['res'] = '((u0-1/u0)*dr(u1) + (1 + 1/u0**2)*dr(u0)*u1)/2 - (dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3)'
problem.substitutions['rhs'] = 'dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3'
problem.add_equation('dr((u0-1/u0)*u1)/2 = dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3')
# problem.add_equation('((u0-1/u0)*dr(u1) + (1 + 1/u0**2)*dr(u0)*u1)/2 = dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3')
problem.add_bc('left(dr(u0))*left(u1) = left(dr(dr(u0)) + dr(u0)**2 + 1/r**2 + l*l1/r**3)')
solver = problem.build_solver()
solver.solve()
u1 = solver.state['u1']
ratio = u1.interpolate(r='right')['g'][0]/u0.interpolate(r='right')['g'][0]
if out:
rhs = solver.evaluator.vars['rhs'].evaluate()
res_u0 = solver.evaluator.vars['res_u0'].evaluate()
res_u1 = solver.evaluator.vars['res_u1'].evaluate()
return {'r':rf, 'u0':u0, 'l1':l1, 'ρ0':ρ0s, 'u1':u1, 'rhs':rhs, 'res_u0':res_u0, 'res_u1':res_u1, 'ratio':ratio}
else: return u1.interpolate(r='right')['g'][0]/u0.interpolate(r='right')['g'][0]
from scipy.optimize import brentq
def find_equal_energy(g):
ls = np.linspace(0, .3)
discs = discriminant(ls,g)
leftmost = ls[np.where(discs < 0)[0][-1]]
def energy_gap(l):
r1, r0, r2 = sonic_points(l, g)
e1, e2 = sonic_energy(l, g, r1), sonic_energy(l, g, r2)
return e1 - e2
return brentq(energy_gap, leftmost, .3)
def check_crossings(l, g,out=False, nr=128):
dic = {}
dic['disc'] = disc = discriminant(l, g)
if disc < 0: return dic
dic['r1'],dic['r0'],dic['r2'] = r1, r0, r2 = sonic_points(l, g)
dic['e1'] = e1 = sonic_energy(l, g, r1)
dic['e2'] = e2 = sonic_energy(l, g, r2)
dic['e0'] = e0 = sonic_energy(l, g, r0)
if e1 > e2: return dic
dic['log_u1_min_0'] = log_u1_min_0 = log_min_u1_estimate(l, g, r0, e1)
if log_u1_min_0 > -20: dic['u1_min'] = u1_min = min_u1(l, g, r0, e1, -np.exp(log_u1_min_0))
else: dic['u1_min'] = u1_min = -np.exp(log_u1_min_0)
dic['u2_max'] = u2_max = max_u2(l, g, r0, e2)
dic['r_crit_u1'] = rcrit = r_crit_u1(l, g, e1, r0, r2)
dic['crossing'] = u1_min - 1/u2_max
if dic['crossing'] > 0:
dic['rs2'] = rs2 = find_shock(l, g, e1, e2, r0, rcrit)
try: dic['u1_r0'] = u1 = u1_r0(l, g, r1, r0, rs2, e1, e2, out=out, nr=nr)
except Exception: pass
return dic
ls = np.linspace(0,.3,501)[1:]
gs = np.linspace(0,5e-3,21)[1:]
# a = ac.Accretion(ls[11],gs[0])
# a.plot()
Δs = discriminant(ls[:,None], gs[None,:])
# g = r_h
# Δs = discriminant(ls, g)
dics = {}
for j, g in enumerate(gs):
for i, l in enumerate(ls):
if Δs[i,j] > 0:
# print(i, j, f'{l:.3f}')
dics[i,j] = check_crossings(l, g, out=True)
for key, dic in dics.items():
if dic.get('crossing',-1) > 0:
if 'u1_r0' in dic:
print(key, dic['u1_r0']['ratio'])
zeros = np.zeros((len(ls), len(gs)))
shocks = zeros.copy()
ratios = zeros.copy()
for i, l in enumerate(ls):
for j, g in enumerate(gs):
if dics.get((i,j)):
shocks[i,j] = dics[i,j].get('crossing',np.nan)
if dics[i,j].get('crossing',-1) > 0 and ('u1_r0' in dics[i,j]):
ratio = dics[i,j]['u1_r0']['ratio']
if ratio > 0: ratio = np.nan
ratios[i, j] = ratio
ls_dic = {}
ls_dic['three-sonics'] = [ls[np.where(Δs[:,j] > 0)[0][0]] for j in range(len(gs))]
ls_dic['tangent'] = [ls[np.where((shocks[:,j]>0) & np.isfinite(shocks[:,j]))[0][0]] for j in range(len(gs))]
for mag in range(0, 30, 5):
ls_dic[f'min-u1-{mag}'] = [ls[(np.where(np.log(-ratios[:,j]) > mag)[0][0])] for j in range(len(gs))]
ls_dic['collision'] = [find_equal_energy(g) for g in gs]
g0 = 1e-4
l0 = brentq(lambda l: discriminant(l, 1e-4), 0.01, .1)
l1 = find_equal_energy(1e-4)
gs2 = np.linspace(0,5e-3,21)
discs2 = discriminant(ls[:,None], gs2[None,:])
fig, ax = plt.subplots(figsize=(6,4))
ax.plot([l0]+ls_dic['three-sonics'], [g0]+list(gs), 'C4', label='Three sonic points',zorder=11)
ax.plot(ls_dic['tangent'], gs, 'C0', label='Shock tangency',zorder=10)
ax.plot(ls_dic['min-u1-15'], gs, 'C2', label='$ε = 10^{-15}$ breakdown',zorder=9)
ax.plot(ls_dic['min-u1-20'], gs, 'C1', label='$ε = 10^{-20}$ breakdown',zorder=8)
ax.plot(ls_dic['min-u1-25'], gs, 'C3', label='$ε = 10^{-25}$ breakdown',zorder=7)
ax.plot([l1]+ls_dic['collision'], [g0]+list(gs), 'C5', label='Shock-sonic collision',zorder=6)
ax.contourf(ls, gs2, discs2.T, np.arange(-2e-10,2e-10,1e-11), cmap='RdBu_r')
ax.set_facecolor('k')
ax.legend(frameon=False)
ax.set(xlim=[0.05,0.28],ylim=[0.000,0.005],
xlabel='Angular momentum $\ell$',
ylabel='Horizon scale $r_h$',
title='Narrow shock regimes for small $r_h$')
plt.savefig('figures/black-hole-small-rh-asymptotic-breakdown-regimes.png',dpi=400) | stability_diagrams.py | import numpy as np
import matplotlib.pyplot as plt
import accretion_code as ac
import file_tools as flt
from scipy.interpolate import interp1d
import dedalus.public as de
import file_tools as flt
def mag(x): return np.log10(np.abs(x)+1e-16)
import mpmath as mp
li2_obj = np.frompyfunc(lambda x: float(mp.polylog(2,x)),1,1)
li2 = lambda y: li2_obj(y).astype(float)
# stability diagrams
filename = 'regime-curves.h5'
curves = {}
for curve in flt.get_keys(filename):
curves[curve] = {'l':flt.load_data(filename,'l',group=curve)[0],
'g':flt.load_data(filename,'g',group=curve)[0]}
curve_splines = {curve: interp1d(curves[curve]['l'], curves[curve]['g']) for curve in curves}
fracbasis = de.Chebyshev('s',12,interval=(0,1))
fracs = fracbasis.grid()
c0 = curve_splines['equal-shock']
c1 = curve_splines['tangent-shock']
ls = np.linspace(0.2, 1.3, 20)
gs0 = c0(ls)
gs1 = c1(ls)
gs = gs0[:,None] + (gs1 - gs0)[:,None]*fracs[None,:]
# shock location and magnitude
dics = {}
ur0_rs = {}
for i in range(len(ls)):
for j in range(gs.shape[1]):
print(i,j)
li = ls[i]
gij = gs[i,j]
dics[i,j] = ac.stability(li,gij,out=False)
# growth rate calculation
i, j = 1,1
dic = dics[i, j]
λ1s = np.zeros(gs.shape)
λ2s = np.zeros(gs.shape)
avals = np.zeros(gs.shape)
for i in range(gs.shape[0]):
for j in range(gs.shape[1]):
l, g = ls[i], gs[i,j]
λ1s[i,j] = dics[i,j]['λ_s1']
λ2s[i,j] = dics[i,j]['λ_s2']
from scipy.interpolate import RectBivariateSpline
λ1_spline = RectBivariateSpline(ls, fracs, λ1s)
λ2_spline = RectBivariateSpline(ls, fracs, λ2s)
ls_high = np.linspace(.2,1.3,100)
fracs_high = np.linspace(.005,.995,100)
λ1s_high = λ1_spline(ls_high, fracs_high)
λ2s_high = λ2_spline(ls_high, fracs_high)
import matplotlib.colors as colors
frac = np.linspace(0,1,gs.shape[1],endpoint=False)
fig, ax = plt.subplots(1,2,gridspec_kw={'wspace':0},figsize=(6,2.5))
p1 = ax[0].pcolormesh(ls_high, fracs_high, λ1s_high.T,
norm=colors.SymLogNorm(linthresh=0.1, linscale=1.,
vmin=-2000, vmax=2000, base=10),
shading='nearest',cmap='RdBu_r')
ax[0].contour(ls_high, fracs_high, np.log10(np.abs(λ1s_high.T)),[-1,0,1,2,3],colors='k',linestyles='-')
p2 = ax[1].pcolormesh(ls_high, fracs_high, λ2s_high.T,
norm=colors.SymLogNorm(linthresh=0.1, linscale=1.,
vmin=-2000, vmax=2000, base=10),
shading='nearest',cmap='RdBu_r')
ax[1].contour(ls_high, fracs_high, np.log10(np.abs(λ2s_high.T)),[-1,0,1,2,3],colors='k',linestyles='-')
ax[0].set(xlabel='$\ell$',title='Inner shock')
ax[0].set_ylabel('$\\frac{r_h - r_{h,1}(\ell)}{r_{h,2}(\ell) - r_{h,1}(\ell)}$',fontsize=15)
ax[1].set(xlabel='$\ell$',yticks=[],title='Outer shock')
fig.suptitle('Asymptotic growth/decay rate $\lambda(\ell, r_h)$',y=1.08)
plt.colorbar(p2,ax=ax)
plt.savefig('figures/black-hole-shock-stability-regimes.png',bbox_inches='tight',dpi=500)
# finite eps regimes
def discriminant(l, g):
return 32 * l**6 * g**3 - 32 * l**8 * g**3 - 432 * l**4 * g**4 \
+ 560* l**6 * g**4 - 1440 * l**4 * g**5 - 96* l**6*g**5 \
- 1184*l**4*g**6 - 96*l**4*g**7 - 16*l**2*g**8 - 32*l**2*g**9
def sonic_points(l, g):
coeff_list = [1, -2*(1+g), l**2 + g**2, -2*l**2*g, (l*g)**2]
return np.roots(coeff_list).astype(complex).real[:-1][::-1]
def sonic_energy(l, g, rs):
return ac.newton(lambda e: ac.f(rs,-1,e,l,g), lambda e:ac.fe(rs,-1,e,l,g), 0)
def log_min_u1_estimate(l, g, r0, e):
return .5*(l/r0)**2 - 2/(r0-g) - np.log(r0) - e
def min_u1(l, g, r0, e1, u1):
return ac.newton(lambda u: ac.f(r0, u, e1, l, g),
lambda u: ac.fu(r0, u, e1, l, g),
u1,
bounds=[-1, -0],)
def max_u2(l, g, r0, e2):
return ac.newton(lambda u: ac.f(r0, u, e2, l, g),
lambda u: ac.fu(r0, u, e2, l, g),
-1.1,
bounds=[-np.inf, -1],)
def r_crit_u1(l, g, e1, r0, r2):
return ac.newton(lambda r: ac.f(r, -1, e1, l, g),
lambda r: ac.fr(r, -1, e1, l, g),
.5*(r0+r2),
bounds=[r0,r2])
def find_shock(l, g, e1, e2, r0, rcrit, out=False):
u10, u20 = -.9, -1.1
u1f = lambda r: ac.newton(lambda u: ac.f(r, u, e1, l, g),
lambda u: ac.fu(r, u, e1, l, g),
u10,
bounds=[-1, 0], out=' u1' if out else None, x_symb='u1')
u2f = lambda r: ac.newton(lambda u: ac.f(r, u, e2, l, g),
lambda u: ac.fu(r, u, e2, l, g),
u20,
bounds=[-np.inf, -1], out=' u2' if out else None, x_symb='u2')
u10 = u1f(rcrit*.99)
u20 = u2f(rcrit*.99)
def dr_gap(r):
nonlocal u10
nonlocal u20
u1, u2 = u10, u20 = u1f(r), u2f(r)
diff = u1 - 1/u2
dru1 = -ac.fr(r, u1, e1, l, g)/ac.fu(r, u1, e1, l, g)
dru2 = -ac.fr(r, u2, e2, l, g)/ac.fu(r, u2, e2, l, g)
grad = dru1 + dru2/u2**2
return grad
return ac.newton(lambda r: u1f(r) - 1/u2f(r),
dr_gap,
rcrit*(1-1e-5),
bounds=[r0, rcrit], out=out, x_symb='r', f_symb='Δu')
def u0_vec(r, e1, l, g, out=False):
u10 = -.99
us = np.zeros(r.shape)
def u1f(r):
nonlocal u10
return ac.newton(lambda u: ac.f(r, u, e1, l, g),
lambda u: ac.fu(r, u, e1, l, g),
u10,
bounds=[-1, 0], out=' u1' if out else None, x_symb='u1',
xatol=1e-14, fatol=1e-14, xrtol=1e-14)
for i, ri in enumerate(r):
us[i] = u10 = u1f(ri)
return us
def u1_r0(l, g, r1, r0, rs2, e1, e2, nr=128, out=False):
rbasis = de.Chebyshev('r',nr,interval=(r1, rs2))
domain = de.Domain([rbasis], grid_dtype=np.float64)
r, = domain.grids()
u0s = u0_vec(r, e1, l, g)
u0, l1, rf = domain.new_fields(3)
rf['g'] = r
u0['g'] = u0s
ρinf = np.exp(e2)
ρ0s = -1/(r*u0s)
l1['g'] = 2*l*(ρ0s - ρinf)
problem = de.LBVP(domain, variables=['u1'])
problem.parameters['l'] = l
problem.parameters['g'] = g
problem.parameters['l1'] = l1
problem.parameters['u0'] = u0
problem.parameters['e1'] = e1
problem.substitutions['res_u0'] = '(u0**2 + (l/r)**2)/2 - 2/(r-g) - log(-r*u0) - e1'
problem.substitutions['res_u1'] = 'dr((u0-1/u0)*u1)/2 - (dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3)'
# problem.substitutions['res'] = '((u0-1/u0)*dr(u1) + (1 + 1/u0**2)*dr(u0)*u1)/2 - (dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3)'
problem.substitutions['rhs'] = 'dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3'
problem.add_equation('dr((u0-1/u0)*u1)/2 = dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3')
# problem.add_equation('((u0-1/u0)*dr(u1) + (1 + 1/u0**2)*dr(u0)*u1)/2 = dr(dr(u0)) - dr(u0)**2/u0 - u0/r**2 + l*l1/r**3')
problem.add_bc('left(dr(u0))*left(u1) = left(dr(dr(u0)) + dr(u0)**2 + 1/r**2 + l*l1/r**3)')
solver = problem.build_solver()
solver.solve()
u1 = solver.state['u1']
ratio = u1.interpolate(r='right')['g'][0]/u0.interpolate(r='right')['g'][0]
if out:
rhs = solver.evaluator.vars['rhs'].evaluate()
res_u0 = solver.evaluator.vars['res_u0'].evaluate()
res_u1 = solver.evaluator.vars['res_u1'].evaluate()
return {'r':rf, 'u0':u0, 'l1':l1, 'ρ0':ρ0s, 'u1':u1, 'rhs':rhs, 'res_u0':res_u0, 'res_u1':res_u1, 'ratio':ratio}
else: return u1.interpolate(r='right')['g'][0]/u0.interpolate(r='right')['g'][0]
from scipy.optimize import brentq
def find_equal_energy(g):
ls = np.linspace(0, .3)
discs = discriminant(ls,g)
leftmost = ls[np.where(discs < 0)[0][-1]]
def energy_gap(l):
r1, r0, r2 = sonic_points(l, g)
e1, e2 = sonic_energy(l, g, r1), sonic_energy(l, g, r2)
return e1 - e2
return brentq(energy_gap, leftmost, .3)
def check_crossings(l, g,out=False, nr=128):
dic = {}
dic['disc'] = disc = discriminant(l, g)
if disc < 0: return dic
dic['r1'],dic['r0'],dic['r2'] = r1, r0, r2 = sonic_points(l, g)
dic['e1'] = e1 = sonic_energy(l, g, r1)
dic['e2'] = e2 = sonic_energy(l, g, r2)
dic['e0'] = e0 = sonic_energy(l, g, r0)
if e1 > e2: return dic
dic['log_u1_min_0'] = log_u1_min_0 = log_min_u1_estimate(l, g, r0, e1)
if log_u1_min_0 > -20: dic['u1_min'] = u1_min = min_u1(l, g, r0, e1, -np.exp(log_u1_min_0))
else: dic['u1_min'] = u1_min = -np.exp(log_u1_min_0)
dic['u2_max'] = u2_max = max_u2(l, g, r0, e2)
dic['r_crit_u1'] = rcrit = r_crit_u1(l, g, e1, r0, r2)
dic['crossing'] = u1_min - 1/u2_max
if dic['crossing'] > 0:
dic['rs2'] = rs2 = find_shock(l, g, e1, e2, r0, rcrit)
try: dic['u1_r0'] = u1 = u1_r0(l, g, r1, r0, rs2, e1, e2, out=out, nr=nr)
except Exception: pass
return dic
ls = np.linspace(0,.3,501)[1:]
gs = np.linspace(0,5e-3,21)[1:]
# a = ac.Accretion(ls[11],gs[0])
# a.plot()
Δs = discriminant(ls[:,None], gs[None,:])
# g = r_h
# Δs = discriminant(ls, g)
dics = {}
for j, g in enumerate(gs):
for i, l in enumerate(ls):
if Δs[i,j] > 0:
# print(i, j, f'{l:.3f}')
dics[i,j] = check_crossings(l, g, out=True)
for key, dic in dics.items():
if dic.get('crossing',-1) > 0:
if 'u1_r0' in dic:
print(key, dic['u1_r0']['ratio'])
zeros = np.zeros((len(ls), len(gs)))
shocks = zeros.copy()
ratios = zeros.copy()
for i, l in enumerate(ls):
for j, g in enumerate(gs):
if dics.get((i,j)):
shocks[i,j] = dics[i,j].get('crossing',np.nan)
if dics[i,j].get('crossing',-1) > 0 and ('u1_r0' in dics[i,j]):
ratio = dics[i,j]['u1_r0']['ratio']
if ratio > 0: ratio = np.nan
ratios[i, j] = ratio
ls_dic = {}
ls_dic['three-sonics'] = [ls[np.where(Δs[:,j] > 0)[0][0]] for j in range(len(gs))]
ls_dic['tangent'] = [ls[np.where((shocks[:,j]>0) & np.isfinite(shocks[:,j]))[0][0]] for j in range(len(gs))]
for mag in range(0, 30, 5):
ls_dic[f'min-u1-{mag}'] = [ls[(np.where(np.log(-ratios[:,j]) > mag)[0][0])] for j in range(len(gs))]
ls_dic['collision'] = [find_equal_energy(g) for g in gs]
g0 = 1e-4
l0 = brentq(lambda l: discriminant(l, 1e-4), 0.01, .1)
l1 = find_equal_energy(1e-4)
gs2 = np.linspace(0,5e-3,21)
discs2 = discriminant(ls[:,None], gs2[None,:])
fig, ax = plt.subplots(figsize=(6,4))
ax.plot([l0]+ls_dic['three-sonics'], [g0]+list(gs), 'C4', label='Three sonic points',zorder=11)
ax.plot(ls_dic['tangent'], gs, 'C0', label='Shock tangency',zorder=10)
ax.plot(ls_dic['min-u1-15'], gs, 'C2', label='$ε = 10^{-15}$ breakdown',zorder=9)
ax.plot(ls_dic['min-u1-20'], gs, 'C1', label='$ε = 10^{-20}$ breakdown',zorder=8)
ax.plot(ls_dic['min-u1-25'], gs, 'C3', label='$ε = 10^{-25}$ breakdown',zorder=7)
ax.plot([l1]+ls_dic['collision'], [g0]+list(gs), 'C5', label='Shock-sonic collision',zorder=6)
ax.contourf(ls, gs2, discs2.T, np.arange(-2e-10,2e-10,1e-11), cmap='RdBu_r')
ax.set_facecolor('k')
ax.legend(frameon=False)
ax.set(xlim=[0.05,0.28],ylim=[0.000,0.005],
xlabel='Angular momentum $\ell$',
ylabel='Horizon scale $r_h$',
title='Narrow shock regimes for small $r_h$')
plt.savefig('figures/black-hole-small-rh-asymptotic-breakdown-regimes.png',dpi=400) | 0.526586 | 0.535281 |
import re
import logging
class Scoring:
TYPE_REPLY = 'reply'
TYPE_LINKSHARE = 'link'
def __init__(self, lockservice, botservice, teamlinkservice, tweetservice, scoreservice, stats):
self.lockservice = lockservice
self.botservice = botservice
self.teamlinkservice = teamlinkservice
self.tweetservice = tweetservice
self.scoreservice = scoreservice
self.stats = stats
def main(self):
if not self.lockservice.acquire():
return
self.bots = self.botservice.get_bots()
self.links = self.teamlinkservice.get_links()
self.last_tweet_psqlid = self.scoreservice.get_last_score_ref_id()
tweets_entities = self.tweetservice.get_scoring_entities(self.bots, self.last_tweet_psqlid)
max_tweet_id = -1
for entity in tweets_entities:
max_tweet_id = max(max_tweet_id, int(entity['tweet_id']))
if entity['type'] == 'mention':
for bot in self.bots:
if bot['twitter_id'] == entity['text'].strip():
logging.info('bot %s scored a reply!', entity['text'])
self.stats.log_point('score.mention.' + str(bot['team_id']), entity['timestamp'])
self.scoreservice.score(bot['team_id'], bot['twitter_id'], self.TYPE_REPLY, entity['tweet_id'])
break
elif entity['type'] == 'url':
for link in self.links:
if link['link'].strip() == entity['text'].strip():
logging.info('team %s scored a retweet of %s on tweet id %s!', link['team_id'], entity['text'], entity['tweet_id'])
self.stats.log_point('score.link.' + str(link['team_id']), entity['timestamp'])
self.scoreservice.score(link['team_id'], None, self.TYPE_LINKSHARE, entity['tweet_id'])
break
self.scoreservice.mark_last_score_ref_id(max_tweet_id) | provision/src/scoring.py | import re
import logging
class Scoring:
TYPE_REPLY = 'reply'
TYPE_LINKSHARE = 'link'
def __init__(self, lockservice, botservice, teamlinkservice, tweetservice, scoreservice, stats):
self.lockservice = lockservice
self.botservice = botservice
self.teamlinkservice = teamlinkservice
self.tweetservice = tweetservice
self.scoreservice = scoreservice
self.stats = stats
def main(self):
if not self.lockservice.acquire():
return
self.bots = self.botservice.get_bots()
self.links = self.teamlinkservice.get_links()
self.last_tweet_psqlid = self.scoreservice.get_last_score_ref_id()
tweets_entities = self.tweetservice.get_scoring_entities(self.bots, self.last_tweet_psqlid)
max_tweet_id = -1
for entity in tweets_entities:
max_tweet_id = max(max_tweet_id, int(entity['tweet_id']))
if entity['type'] == 'mention':
for bot in self.bots:
if bot['twitter_id'] == entity['text'].strip():
logging.info('bot %s scored a reply!', entity['text'])
self.stats.log_point('score.mention.' + str(bot['team_id']), entity['timestamp'])
self.scoreservice.score(bot['team_id'], bot['twitter_id'], self.TYPE_REPLY, entity['tweet_id'])
break
elif entity['type'] == 'url':
for link in self.links:
if link['link'].strip() == entity['text'].strip():
logging.info('team %s scored a retweet of %s on tweet id %s!', link['team_id'], entity['text'], entity['tweet_id'])
self.stats.log_point('score.link.' + str(link['team_id']), entity['timestamp'])
self.scoreservice.score(link['team_id'], None, self.TYPE_LINKSHARE, entity['tweet_id'])
break
self.scoreservice.mark_last_score_ref_id(max_tweet_id) | 0.385259 | 0.074838 |
from __future__ import print_function, absolute_import
from reid.models import model_utils as mu
from reid.utils.data import data_process as dp
from reid.utils.serialization import save_checkpoint
from reid import datasets
from reid import models
from reid.config import Config
import numpy as np
import os
import argparse
parser = argparse.ArgumentParser(description='Cotrain args')
parser.add_argument('-s', '--seed', type=int, default=0)
args = parser.parse_args()
def self_train(configs, data, iter_step=1, train_ratio=0.2):
"""
cotrain model:
params:
model_names: model configs
data: dataset include train and untrain data
save_paths: paths for storing models
iter_step: maximum iteration steps
train_ratio: labeled data ratio
"""
assert iter_step >= 1
train_data, untrain_data = dp.split_dataset(
data.trainval, train_ratio, args.seed)
data_dir = data.images_dir
for view in range(len(configs)):
add_ratio = 0.5
new_train_data = train_data
new_untrain_data = untrain_data
for step in range(iter_step):
configs[view].set_training(True)
model = mu.train(new_train_data, data_dir, configs[view])
save_checkpoint({
'state_dict': model.state_dict(),
'epoch': step + 1,
'train_data': train_data}, False,
fpath=os.path.join(
configs[view].logs_dir, configs[view].model_name, 'self_train.epoch%d' % step)
)
# calculate predict probility on all data
p_b = mu.predict_prob(model, data.trainval, data_dir, configs[view])
p_y = np.argmax(p_b, axis=1)
t_y = [c for (_,c,_,_) in data.trainval]
print(np.mean(t_y == p_y))
if len(new_untrain_data) == 0:
break
pred_prob = mu.predict_prob(
model, new_untrain_data, data_dir, configs[view])
pred_y = np.argmax(pred_prob, axis=1)
add_id = dp.sel_idx(pred_prob, new_train_data, add_ratio)
new_train_data, new_untrain_data = dp.update_train_untrain(
add_id, new_train_data, new_untrain_data, pred_y)
config1 = Config()
config2 = Config(model_name='densenet121', height=224, width=224)
config3 = Config(model_name='resnet101', img_translation=2)
dataset = 'market1501std'
cur_path = os.getcwd()
logs_dir = os.path.join(cur_path, 'logs')
data_dir = os.path.join(cur_path, 'data', dataset)
data = datasets.create(dataset, data_dir)
# self_train([config1, config2, config3], data, 5)
self_train([config3], data, 5) | self_train.py | from __future__ import print_function, absolute_import
from reid.models import model_utils as mu
from reid.utils.data import data_process as dp
from reid.utils.serialization import save_checkpoint
from reid import datasets
from reid import models
from reid.config import Config
import numpy as np
import os
import argparse
parser = argparse.ArgumentParser(description='Cotrain args')
parser.add_argument('-s', '--seed', type=int, default=0)
args = parser.parse_args()
def self_train(configs, data, iter_step=1, train_ratio=0.2):
"""
cotrain model:
params:
model_names: model configs
data: dataset include train and untrain data
save_paths: paths for storing models
iter_step: maximum iteration steps
train_ratio: labeled data ratio
"""
assert iter_step >= 1
train_data, untrain_data = dp.split_dataset(
data.trainval, train_ratio, args.seed)
data_dir = data.images_dir
for view in range(len(configs)):
add_ratio = 0.5
new_train_data = train_data
new_untrain_data = untrain_data
for step in range(iter_step):
configs[view].set_training(True)
model = mu.train(new_train_data, data_dir, configs[view])
save_checkpoint({
'state_dict': model.state_dict(),
'epoch': step + 1,
'train_data': train_data}, False,
fpath=os.path.join(
configs[view].logs_dir, configs[view].model_name, 'self_train.epoch%d' % step)
)
# calculate predict probility on all data
p_b = mu.predict_prob(model, data.trainval, data_dir, configs[view])
p_y = np.argmax(p_b, axis=1)
t_y = [c for (_,c,_,_) in data.trainval]
print(np.mean(t_y == p_y))
if len(new_untrain_data) == 0:
break
pred_prob = mu.predict_prob(
model, new_untrain_data, data_dir, configs[view])
pred_y = np.argmax(pred_prob, axis=1)
add_id = dp.sel_idx(pred_prob, new_train_data, add_ratio)
new_train_data, new_untrain_data = dp.update_train_untrain(
add_id, new_train_data, new_untrain_data, pred_y)
config1 = Config()
config2 = Config(model_name='densenet121', height=224, width=224)
config3 = Config(model_name='resnet101', img_translation=2)
dataset = 'market1501std'
cur_path = os.getcwd()
logs_dir = os.path.join(cur_path, 'logs')
data_dir = os.path.join(cur_path, 'data', dataset)
data = datasets.create(dataset, data_dir)
# self_train([config1, config2, config3], data, 5)
self_train([config3], data, 5) | 0.590779 | 0.273828 |
from typing import List
from injector import inject
from pdip.integrator.connection.base import ConnectionSourceAdapter
from pdip.integrator.connection.types.sql.base import SqlProvider
from pdip.integrator.integration.domain.base import IntegrationBase
class SqlSourceAdapter(ConnectionSourceAdapter):
@inject
def __init__(self,
provider: SqlProvider,
):
self.provider = provider
def get_source_data_count(self, integration: IntegrationBase) -> int:
source_context = self.provider.get_context_by_config(
config=integration.SourceConnections.Sql.Connection)
query = integration.SourceConnections.Sql.Query
if integration.SourceConnections.Sql.Query is None or integration.SourceConnections.Sql.Query == '':
schema = integration.SourceConnections.Sql.Schema
table = integration.SourceConnections.Sql.ObjectName
if schema is None or schema == '' or table is None or table == '':
raise Exception(f"Source Schema and Table required. {schema}.{table}")
source_columns = integration.SourceConnections.Columns
if source_columns is not None and len(source_columns) > 0:
source_column_rows = [column.Name for column in source_columns]
columns_query = ",".join(source_column_rows)
query = source_context.dialect.get_table_select_query(selected_rows=columns_query, schema=schema, table=table)
else:
query = source_context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
data_count = source_context.get_table_count(query=query)
return data_count
def get_source_data(self, integration: IntegrationBase) -> List[any]:
source_context = self.provider.get_context_by_config(
config=integration.SourceConnections.Sql.Connection)
query = integration.SourceConnections.Sql.Query
if integration.SourceConnections.Sql.Query is None or integration.SourceConnections.Sql.Query == '':
schema = integration.SourceConnections.Sql.Schema
table = integration.SourceConnections.Sql.ObjectName
if schema is None or schema == '' or table is None or table == '':
raise Exception(f"Source Schema and Table required. {schema}.{table}")
source_columns = integration.SourceConnections.Columns
if source_columns is not None and len(source_columns) > 0:
source_column_rows = [column.Name for column in source_columns]
columns_query = ",".join(source_column_rows)
query = source_context.dialect.get_table_select_query(selected_rows=columns_query, schema=schema, table=table)
else:
query = source_context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
data = source_context.get_table_data(query=query)
return data
def get_source_data_with_paging(self, integration: IntegrationBase, start, end) -> List[any]:
source_context = self.provider.get_context_by_config(
config=integration.SourceConnections.Sql.Connection)
query = integration.SourceConnections.Sql.Query
if integration.SourceConnections.Sql.Query is None or integration.SourceConnections.Sql.Query == '':
schema = integration.SourceConnections.Sql.Schema
table = integration.SourceConnections.Sql.ObjectName
if schema is None or schema == '' or table is None or table == '':
raise Exception(f"Source Schema and Table required. {schema}.{table}")
source_columns = integration.SourceConnections.Columns
if source_columns is not None and len(source_columns) > 0:
source_column_rows = [column.Name for column in source_columns]
columns_query = ",".join(source_column_rows)
query = source_context.dialect.get_table_select_query(selected_rows=columns_query, schema=schema, table=table)
else:
query = source_context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
data = source_context.get_table_data_with_paging(
query=query,
start=start,
end=end
)
return data | pdip/integrator/connection/types/sql/adapters/source/sql_source_adapter.py | from typing import List
from injector import inject
from pdip.integrator.connection.base import ConnectionSourceAdapter
from pdip.integrator.connection.types.sql.base import SqlProvider
from pdip.integrator.integration.domain.base import IntegrationBase
class SqlSourceAdapter(ConnectionSourceAdapter):
@inject
def __init__(self,
provider: SqlProvider,
):
self.provider = provider
def get_source_data_count(self, integration: IntegrationBase) -> int:
source_context = self.provider.get_context_by_config(
config=integration.SourceConnections.Sql.Connection)
query = integration.SourceConnections.Sql.Query
if integration.SourceConnections.Sql.Query is None or integration.SourceConnections.Sql.Query == '':
schema = integration.SourceConnections.Sql.Schema
table = integration.SourceConnections.Sql.ObjectName
if schema is None or schema == '' or table is None or table == '':
raise Exception(f"Source Schema and Table required. {schema}.{table}")
source_columns = integration.SourceConnections.Columns
if source_columns is not None and len(source_columns) > 0:
source_column_rows = [column.Name for column in source_columns]
columns_query = ",".join(source_column_rows)
query = source_context.dialect.get_table_select_query(selected_rows=columns_query, schema=schema, table=table)
else:
query = source_context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
data_count = source_context.get_table_count(query=query)
return data_count
def get_source_data(self, integration: IntegrationBase) -> List[any]:
source_context = self.provider.get_context_by_config(
config=integration.SourceConnections.Sql.Connection)
query = integration.SourceConnections.Sql.Query
if integration.SourceConnections.Sql.Query is None or integration.SourceConnections.Sql.Query == '':
schema = integration.SourceConnections.Sql.Schema
table = integration.SourceConnections.Sql.ObjectName
if schema is None or schema == '' or table is None or table == '':
raise Exception(f"Source Schema and Table required. {schema}.{table}")
source_columns = integration.SourceConnections.Columns
if source_columns is not None and len(source_columns) > 0:
source_column_rows = [column.Name for column in source_columns]
columns_query = ",".join(source_column_rows)
query = source_context.dialect.get_table_select_query(selected_rows=columns_query, schema=schema, table=table)
else:
query = source_context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
data = source_context.get_table_data(query=query)
return data
def get_source_data_with_paging(self, integration: IntegrationBase, start, end) -> List[any]:
source_context = self.provider.get_context_by_config(
config=integration.SourceConnections.Sql.Connection)
query = integration.SourceConnections.Sql.Query
if integration.SourceConnections.Sql.Query is None or integration.SourceConnections.Sql.Query == '':
schema = integration.SourceConnections.Sql.Schema
table = integration.SourceConnections.Sql.ObjectName
if schema is None or schema == '' or table is None or table == '':
raise Exception(f"Source Schema and Table required. {schema}.{table}")
source_columns = integration.SourceConnections.Columns
if source_columns is not None and len(source_columns) > 0:
source_column_rows = [column.Name for column in source_columns]
columns_query = ",".join(source_column_rows)
query = source_context.dialect.get_table_select_query(selected_rows=columns_query, schema=schema, table=table)
else:
query = source_context.dialect.get_table_select_query(selected_rows='*', schema=schema, table=table)
data = source_context.get_table_data_with_paging(
query=query,
start=start,
end=end
)
return data | 0.647464 | 0.270336 |
"""Test class for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
from numpy import linalg
from numpy import testing
import tensorflow as tf
from prettytensor import functions
TOLERANCE = 0.00001
# Distance functions used in tests. These are defined here instead of using
# scipy so the open source tests don't depend on such a huge module for 3
# 1 line functions.
def cosine(u, v):  # pylint: disable=invalid-name
  """Cosine distance: one minus the cosine similarity of u and v."""
  norm_product = linalg.norm(u, ord=2) * linalg.norm(v, ord=2)
  return 1.0 - numpy.dot(u, v) / norm_product
def cityblock(u, v):  # pylint: disable=invalid-name
  """Manhattan (L1) distance between u and v."""
  return numpy.sum(numpy.abs(u - v))
def euclidean(u, v):  # pylint: disable=invalid-name
  """Euclidean (L2) distance between u and v."""
  difference = u - v
  return linalg.norm(difference, ord=2)
class TensorFlowOpTest(tf.test.TestCase):
  """Checks ops in prettytensor.functions against numpy reference results."""

  def eval_tensor(self, tensors):
    """Evaluates a tensor (or list of tensors) in a test session.

    Always returns a list of numpy results so callers can index out[0].
    """
    if isinstance(tensors, tf.Tensor):
      # Normalize a single tensor into a list for a uniform return shape.
      tensors = [tensors]
    with self.test_session() as sess:
      return sess.run(tensors)

  def test_every_other(self):
    """every_other keeps the elements at even flattened indices."""
    tensor = tf.constant([[1, 2], [3, 4]])
    out = self.eval_tensor(functions.every_other(tensor))
    testing.assert_array_equal(out[0], numpy.array([1, 3], dtype=numpy.int32))
    # Same flat selection regardless of the 2-D input shape.
    tensor = tf.constant([[1, 2, 3, 4]])
    out = self.eval_tensor(functions.every_other(tensor))
    testing.assert_array_equal(out[0], numpy.array([1, 3], dtype=numpy.int32))

  def test_l1_regression_loss(self):
    """L1 regression loss is the elementwise absolute difference."""
    ftensor1 = tf.constant([1., 2., 3., 4.])
    ftensor2 = tf.constant([5., 6., 7., -8.])
    out = self.eval_tensor(functions.l1_regression_loss(ftensor1, ftensor2))
    testing.assert_array_equal(out[0], numpy.array([4., 4., 4., 12.]))

  def test_l2_sq_regression_loss(self):
    """Squared-L2 regression loss is the elementwise squared difference."""
    ftensor1 = tf.constant([1., 2., 3., 4.])
    ftensor2 = tf.constant([5., 6., 7., -8.])
    out = self.eval_tensor(functions.l2_regression_sq_loss(ftensor1, ftensor2))
    testing.assert_array_equal(out[0], numpy.array([16., 16., 16, 144]))

  def test_l2_regression_loss(self):
    """L2 regression loss matches the elementwise absolute difference."""
    ftensor1 = tf.constant([1., 2., 3., 4.])
    ftensor2 = tf.constant([5., 6., 7., -8.])
    out = self.eval_tensor(functions.l2_regression_loss(ftensor1, ftensor2))
    testing.assert_allclose(
        out[0],
        numpy.array([4., 4., 4., 12.]),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_binary_cross_entropy_loss_with_logits(self):
    """Cross-entropy on logits matches x*(1-z) + log(1 + exp(-x))."""
    n1 = numpy.array([2., 3., 4., 5., -6., -7.], dtype=numpy.float32)
    n2 = numpy.array([1., 1., 0., 0., 0., 1.], dtype=numpy.float32)
    ftensor1 = tf.constant(n1)
    ftensor2 = tf.constant(n2)
    out = self.eval_tensor(functions.binary_cross_entropy_loss_with_logits(
        ftensor1, ftensor2))
    testing.assert_allclose(
        out[0],
        n1 * (1-n2) + numpy.log(1 + numpy.exp(-n1)),
        rtol=0.00001)

  def test_soft_plus(self):
    """softplus matches log(1 + exp(scale*x))/scale, stably at large x."""
    # 100 overflows naive implementations in float
    values = (
        numpy.array(
            [-100., -10., 1., 0, 1., 10., 100.],
            dtype=numpy.float32))
    out = self.eval_tensor(
        functions.softplus(
            tf.constant(
                values,
                dtype=tf.float32),
            1.))
    np_values = numpy.log(1. + numpy.exp(values))
    # The naive reference overflows at x=100; patch in the exact limit.
    np_values[6] = 100.
    testing.assert_allclose(out[0], np_values, rtol=TOLERANCE, atol=TOLERANCE)
    # Repeat with scale factor 2.
    out = self.eval_tensor(functions.softplus(tf.constant(values), 2.))
    np_values = numpy.log(1. + numpy.exp(values * 2.)) / 2.
    np_values[6] = 100.
    testing.assert_allclose(out[0], np_values, rtol=TOLERANCE, atol=TOLERANCE)

  def test_cos_distance(self):
    """cos_distance matches the module-level numpy cosine() per row."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.cos_distance(n1, n2))
    testing.assert_allclose(
        out[0],
        numpy.array([cosine(n1[0], n2[0]), cosine(n1[1], n2[1])]),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_l1_distance(self):
    """l1_distance matches the numpy cityblock() reference per row."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.l1_distance(n1, n2))
    testing.assert_allclose(
        out[0],
        numpy.array(
            [cityblock(n1[0], n2[0]), cityblock(n1[1], n2[1])
            ]),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_l2_distance(self):
    """l2_distance matches euclidean() per row, floored by an epsilon."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.l2_distance(n1, n2))
    testing.assert_allclose(
        out[0],
        numpy.array(
            [euclidean(n1[0], n2[0]),
             1e-6  # Epsilon sets the minimum distance so use that instead of 0.
            ]),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_l2_distance_sq(self):
    """l2_distance_sq matches the squared euclidean() reference per row."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.l2_distance_sq(n1, n2))
    testing.assert_allclose(
        out[0],
        numpy.power(
            numpy.array(
                [euclidean(n1[0], n2[0]), euclidean(
                    n1[1], n2[1])]), 2),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_dot_distance(self):
    """dot_distance is the negated rowwise dot product."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.dot_distance(n1, n2))
    testing.assert_allclose(
        out[0],
        numpy.array(-numpy.sum(n1 * n2,
                               axis=1)),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_cos_distance_with_broadcast(self):
    """cos_distance broadcasts a rank-2 arg against a rank-3 arg."""
    n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
                                                             [1., 1., 1., 2.]]],
                     dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.cos_distance(n1, n2))
    expected = numpy.array(
        [[cosine(n1[0, 0], n2[0]), cosine(n1[0, 1], n2[1])],
         [cosine(n1[1, 0], n2[0]), cosine(n1[1, 1], n2[1])]])
    testing.assert_allclose(expected, out[0], atol=TOLERANCE)

  def test_l1_distance_with_broadcast(self):
    """l1_distance broadcasts a rank-2 arg against a rank-3 arg."""
    n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
                                                             [1., 1., 1., 2.]]],
                     dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.l1_distance(n1, n2))
    expected = numpy.array(
        [[cityblock(n1[0, 0], n2[0]), cityblock(
            n1[0, 1], n2[1])], [cityblock(n1[1, 0], n2[0]),
                                cityblock(n1[1, 1], n2[1])]])
    testing.assert_allclose(expected, out[0], atol=TOLERANCE)

  def test_l2_distance_with_broadcast(self):
    """l2_distance broadcasts a rank-2 arg against a rank-3 arg."""
    n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
                                                             [1., 1., 1., 2.]]],
                     dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.l2_distance(n1, n2))
    expected = numpy.array(
        [[euclidean(n1[0, 0], n2[0]), euclidean(
            n1[0, 1], n2[1])], [euclidean(n1[1, 0], n2[0]),
                                euclidean(n1[1, 1], n2[1])]])
    testing.assert_allclose(expected, out[0], atol=TOLERANCE)

  def test_l2_distance_sq_with_broadcast(self):
    """l2_distance_sq broadcasts a rank-2 arg against a rank-3 arg."""
    n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
                                                             [1., 1., 1., 2.]]],
                     dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.l2_distance_sq(n1, n2))
    expected = numpy.array(
        [[euclidean(n1[0, 0], n2[0]), euclidean(
            n1[0, 1], n2[1])], [euclidean(n1[1, 0], n2[0]),
                                euclidean(n1[1, 1], n2[1])]])
    expected = numpy.power(expected, 2)
    testing.assert_allclose(expected, out[0], atol=TOLERANCE)

  def test_dot_distance_with_broadcast(self):
    """dot_distance broadcasts and negates the innermost-axis dot."""
    n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
                                                             [1., 1., 1., 2.]]],
                     dtype=numpy.float32)
    n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
    out = self.eval_tensor(functions.dot_distance(n1, n2))
    testing.assert_allclose(
        out[0],
        numpy.array(-numpy.sum(n1 * n2,
                               axis=2)),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_l2_normalize(self):
    """l2_normalize divides each row by its L2 norm."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    t1 = tf.constant(n1)
    out = self.eval_tensor(functions.l2_normalize(t1, 1))
    testing.assert_allclose(
        out[0],
        n1 / linalg.norm(n1, 2, axis=1).reshape((2, 1)),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_l1_normalize(self):
    """l1_normalize divides each row by its L1 norm."""
    n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
    t1 = tf.constant(n1)
    out = self.eval_tensor(functions.l1_normalize(t1, 1))
    testing.assert_allclose(
        out[0],
        n1 / linalg.norm(n1, 1, axis=1).reshape((2, 1)),
        rtol=TOLERANCE, atol=TOLERANCE)

  def test_leaky_relu(self):
    """leaky_relu passes positives through and scales negatives by 0.01."""
    values = (
        numpy.array(
            [-100., -10., 1., 0, 1., 10., 100.],
            dtype=numpy.float32))
    tensor = tf.constant(values)
    out = self.eval_tensor(functions.leaky_relu(tensor))
    # Build the expected values in place: only negatives are scaled.
    for i, value in enumerate(values):
      if value < 0:
        values[i] *= 0.01
    testing.assert_allclose(out[0], values, rtol=TOLERANCE, atol=TOLERANCE)

  def test_unzip(self):
    """unzip deals rows round-robin into its output tensors."""
    n1 = numpy.array([[1., 2.], [3., 4.], [5., 6.], [7., 8.]],
                     dtype=numpy.float32)
    t1 = tf.constant(n1)
    out = self.eval_tensor(functions.unzip(t1, 0, 4, 2))
    # Output 0 gets rows 0 and 2; output 1 gets rows 1 and 3.
    expected = numpy.array([[1., 2.], [5., 6.]], dtype=numpy.float32)
    testing.assert_allclose(expected, out[0], rtol=TOLERANCE, atol=TOLERANCE)
    expected = numpy.array([[3., 4.], [7., 8.]], dtype=numpy.float32)
    testing.assert_allclose(expected, out[1], rtol=TOLERANCE, atol=TOLERANCE)

  def test_split(self):
    """Testing TF functionality to highlight difference with Unzip."""
    n1 = numpy.array([[1., 2.], [3., 4.], [5., 6.], [7., 8.]],
                     dtype=numpy.float32)
    t1 = tf.constant(n1)
    out = self.eval_tensor(tf.split(value=t1, num_or_size_splits=2, axis=0))
    # tf.split yields contiguous halves, unlike unzip's round-robin deal.
    expected = numpy.array([[1., 2.], [3., 4.]], dtype=numpy.float32)
    testing.assert_allclose(expected, out[0], rtol=TOLERANCE, atol=TOLERANCE)
    expected = numpy.array([[5., 6.], [7., 8.]], dtype=numpy.float32)
    testing.assert_allclose(expected, out[1], rtol=TOLERANCE, atol=TOLERANCE)
if __name__ == '__main__':
tf.test.main() | prettytensor/functions_test.py | """Test class for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
from numpy import linalg
from numpy import testing
import tensorflow as tf
from prettytensor import functions
TOLERANCE = 0.00001
# Distance functions used in tests. These are defined here instead of using
# scipy so the open source tests don't depend on such a huge module for 3
# 1 line functions.
def cosine(u, v): # pylint: disable=invalid-name
return 1.0 - numpy.dot(u, v) / (linalg.norm(u, ord=2) * linalg.norm(v, ord=2))
def cityblock(u, v): # pylint: disable=invalid-name
return numpy.abs(u - v).sum()
def euclidean(u, v): # pylint: disable=invalid-name
return linalg.norm(u - v, ord=2)
class TensorFlowOpTest(tf.test.TestCase):
def eval_tensor(self, tensors):
if isinstance(tensors, tf.Tensor):
tensors = [tensors]
with self.test_session() as sess:
return sess.run(tensors)
def test_every_other(self):
tensor = tf.constant([[1, 2], [3, 4]])
out = self.eval_tensor(functions.every_other(tensor))
testing.assert_array_equal(out[0], numpy.array([1, 3], dtype=numpy.int32))
tensor = tf.constant([[1, 2, 3, 4]])
out = self.eval_tensor(functions.every_other(tensor))
testing.assert_array_equal(out[0], numpy.array([1, 3], dtype=numpy.int32))
def test_l1_regression_loss(self):
ftensor1 = tf.constant([1., 2., 3., 4.])
ftensor2 = tf.constant([5., 6., 7., -8.])
out = self.eval_tensor(functions.l1_regression_loss(ftensor1, ftensor2))
testing.assert_array_equal(out[0], numpy.array([4., 4., 4., 12.]))
def test_l2_sq_regression_loss(self):
ftensor1 = tf.constant([1., 2., 3., 4.])
ftensor2 = tf.constant([5., 6., 7., -8.])
out = self.eval_tensor(functions.l2_regression_sq_loss(ftensor1, ftensor2))
testing.assert_array_equal(out[0], numpy.array([16., 16., 16, 144]))
def test_l2_regression_loss(self):
ftensor1 = tf.constant([1., 2., 3., 4.])
ftensor2 = tf.constant([5., 6., 7., -8.])
out = self.eval_tensor(functions.l2_regression_loss(ftensor1, ftensor2))
testing.assert_allclose(
out[0],
numpy.array([4., 4., 4., 12.]),
rtol=TOLERANCE, atol=TOLERANCE)
def test_binary_cross_entropy_loss_with_logits(self):
n1 = numpy.array([2., 3., 4., 5., -6., -7.], dtype=numpy.float32)
n2 = numpy.array([1., 1., 0., 0., 0., 1.], dtype=numpy.float32)
ftensor1 = tf.constant(n1)
ftensor2 = tf.constant(n2)
out = self.eval_tensor(functions.binary_cross_entropy_loss_with_logits(
ftensor1, ftensor2))
testing.assert_allclose(
out[0],
n1 * (1-n2) + numpy.log(1 + numpy.exp(-n1)),
rtol=0.00001)
def test_soft_plus(self):
# 100 overflows naive implementations in float
values = (
numpy.array(
[-100., -10., 1., 0, 1., 10., 100.],
dtype=numpy.float32))
out = self.eval_tensor(
functions.softplus(
tf.constant(
values,
dtype=tf.float32),
1.))
np_values = numpy.log(1. + numpy.exp(values))
np_values[6] = 100.
testing.assert_allclose(out[0], np_values, rtol=TOLERANCE, atol=TOLERANCE)
out = self.eval_tensor(functions.softplus(tf.constant(values), 2.))
np_values = numpy.log(1. + numpy.exp(values * 2.)) / 2.
np_values[6] = 100.
testing.assert_allclose(out[0], np_values, rtol=TOLERANCE, atol=TOLERANCE)
def test_cos_distance(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.cos_distance(n1, n2))
testing.assert_allclose(
out[0],
numpy.array([cosine(n1[0], n2[0]), cosine(n1[1], n2[1])]),
rtol=TOLERANCE, atol=TOLERANCE)
def test_l1_distance(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.l1_distance(n1, n2))
testing.assert_allclose(
out[0],
numpy.array(
[cityblock(n1[0], n2[0]), cityblock(n1[1], n2[1])
]),
rtol=TOLERANCE, atol=TOLERANCE)
def test_l2_distance(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.l2_distance(n1, n2))
testing.assert_allclose(
out[0],
numpy.array(
[euclidean(n1[0], n2[0]),
1e-6 # Epsilon sets the minimum distance so use that instead of 0.
]),
rtol=TOLERANCE, atol=TOLERANCE)
def test_l2_distance_sq(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.l2_distance_sq(n1, n2))
testing.assert_allclose(
out[0],
numpy.power(
numpy.array(
[euclidean(n1[0], n2[0]), euclidean(
n1[1], n2[1])]), 2),
rtol=TOLERANCE, atol=TOLERANCE)
def test_dot_distance(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.dot_distance(n1, n2))
testing.assert_allclose(
out[0],
numpy.array(-numpy.sum(n1 * n2,
axis=1)),
rtol=TOLERANCE, atol=TOLERANCE)
def test_cos_distance_with_broadcast(self):
n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
[1., 1., 1., 2.]]],
dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.cos_distance(n1, n2))
expected = numpy.array(
[[cosine(n1[0, 0], n2[0]), cosine(n1[0, 1], n2[1])],
[cosine(n1[1, 0], n2[0]), cosine(n1[1, 1], n2[1])]])
testing.assert_allclose(expected, out[0], atol=TOLERANCE)
def test_l1_distance_with_broadcast(self):
n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
[1., 1., 1., 2.]]],
dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.l1_distance(n1, n2))
expected = numpy.array(
[[cityblock(n1[0, 0], n2[0]), cityblock(
n1[0, 1], n2[1])], [cityblock(n1[1, 0], n2[0]),
cityblock(n1[1, 1], n2[1])]])
testing.assert_allclose(expected, out[0], atol=TOLERANCE)
def test_l2_distance_with_broadcast(self):
n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
[1., 1., 1., 2.]]],
dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.l2_distance(n1, n2))
expected = numpy.array(
[[euclidean(n1[0, 0], n2[0]), euclidean(
n1[0, 1], n2[1])], [euclidean(n1[1, 0], n2[0]),
euclidean(n1[1, 1], n2[1])]])
testing.assert_allclose(expected, out[0], atol=TOLERANCE)
def test_l2_distance_sq_with_broadcast(self):
n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
[1., 1., 1., 2.]]],
dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.l2_distance_sq(n1, n2))
expected = numpy.array(
[[euclidean(n1[0, 0], n2[0]), euclidean(
n1[0, 1], n2[1])], [euclidean(n1[1, 0], n2[0]),
euclidean(n1[1, 1], n2[1])]])
expected = numpy.power(expected, 2)
testing.assert_allclose(expected, out[0], atol=TOLERANCE)
def test_dot_distance_with_broadcast(self):
n1 = numpy.array([[[1., 2., 3., 4.], [1., 1., 1., 1.]], [[5., 6., 7., 8.],
[1., 1., 1., 2.]]],
dtype=numpy.float32)
n2 = numpy.array([[5., 6., 7., -8.], [1., 1., 1., 1.]], dtype=numpy.float32)
out = self.eval_tensor(functions.dot_distance(n1, n2))
testing.assert_allclose(
out[0],
numpy.array(-numpy.sum(n1 * n2,
axis=2)),
rtol=TOLERANCE, atol=TOLERANCE)
def test_l2_normalize(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
t1 = tf.constant(n1)
out = self.eval_tensor(functions.l2_normalize(t1, 1))
testing.assert_allclose(
out[0],
n1 / linalg.norm(n1, 2, axis=1).reshape((2, 1)),
rtol=TOLERANCE, atol=TOLERANCE)
def test_l1_normalize(self):
n1 = numpy.array([[1., 2., 3., 4.], [1., 1., 1., 1.]], dtype=numpy.float32)
t1 = tf.constant(n1)
out = self.eval_tensor(functions.l1_normalize(t1, 1))
testing.assert_allclose(
out[0],
n1 / linalg.norm(n1, 1, axis=1).reshape((2, 1)),
rtol=TOLERANCE, atol=TOLERANCE)
def test_leaky_relu(self):
values = (
numpy.array(
[-100., -10., 1., 0, 1., 10., 100.],
dtype=numpy.float32))
tensor = tf.constant(values)
out = self.eval_tensor(functions.leaky_relu(tensor))
for i, value in enumerate(values):
if value < 0:
values[i] *= 0.01
testing.assert_allclose(out[0], values, rtol=TOLERANCE, atol=TOLERANCE)
def test_unzip(self):
n1 = numpy.array([[1., 2.], [3., 4.], [5., 6.], [7., 8.]],
dtype=numpy.float32)
t1 = tf.constant(n1)
out = self.eval_tensor(functions.unzip(t1, 0, 4, 2))
expected = numpy.array([[1., 2.], [5., 6.]], dtype=numpy.float32)
testing.assert_allclose(expected, out[0], rtol=TOLERANCE, atol=TOLERANCE)
expected = numpy.array([[3., 4.], [7., 8.]], dtype=numpy.float32)
testing.assert_allclose(expected, out[1], rtol=TOLERANCE, atol=TOLERANCE)
def test_split(self):
"""Testing TF functionality to highlight difference with Unzip."""
n1 = numpy.array([[1., 2.], [3., 4.], [5., 6.], [7., 8.]],
dtype=numpy.float32)
t1 = tf.constant(n1)
out = self.eval_tensor(tf.split(value=t1, num_or_size_splits=2, axis=0))
expected = numpy.array([[1., 2.], [3., 4.]], dtype=numpy.float32)
testing.assert_allclose(expected, out[0], rtol=TOLERANCE, atol=TOLERANCE)
expected = numpy.array([[5., 6.], [7., 8.]], dtype=numpy.float32)
testing.assert_allclose(expected, out[1], rtol=TOLERANCE, atol=TOLERANCE)
if __name__ == '__main__':
tf.test.main() | 0.83025 | 0.57326 |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='import.proto',
package='installed_micro_app',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0cimport.proto\x12\x13installed_micro_app\x1a\x1bgoogle/protobuf/empty.proto\"\xb7\x01\n\x15ImportMicroAppRequest\x12\r\n\x05\x61ppId\x18\x01 \x01(\t\x12\x10\n\x08homepage\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05owner\x18\x04 \x01(\t\x12\x16\n\x0estoryboardJson\x18\x05 \x01(\t\x12\x15\n\rinstallStatus\x18\x06 \x01(\t\x12\x10\n\x08internal\x18\x07 \x01(\t\x12\x0f\n\x07private\x18\x08 \x01(\t\x12\x0e\n\x06status\x18\t \x01(\t\"w\n\x1dImportMicroAppResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12$\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x16.google.protobuf.Emptyb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_IMPORTMICROAPPREQUEST = _descriptor.Descriptor(
name='ImportMicroAppRequest',
full_name='installed_micro_app.ImportMicroAppRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='appId', full_name='installed_micro_app.ImportMicroAppRequest.appId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='homepage', full_name='installed_micro_app.ImportMicroAppRequest.homepage', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='installed_micro_app.ImportMicroAppRequest.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='owner', full_name='installed_micro_app.ImportMicroAppRequest.owner', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='storyboardJson', full_name='installed_micro_app.ImportMicroAppRequest.storyboardJson', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='installStatus', full_name='installed_micro_app.ImportMicroAppRequest.installStatus', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='internal', full_name='installed_micro_app.ImportMicroAppRequest.internal', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='private', full_name='installed_micro_app.ImportMicroAppRequest.private', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='installed_micro_app.ImportMicroAppRequest.status', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=67,
serialized_end=250,
)
_IMPORTMICROAPPRESPONSEWRAPPER = _descriptor.Descriptor(
name='ImportMicroAppResponseWrapper',
full_name='installed_micro_app.ImportMicroAppResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='installed_micro_app.ImportMicroAppResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='installed_micro_app.ImportMicroAppResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='installed_micro_app.ImportMicroAppResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='installed_micro_app.ImportMicroAppResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=252,
serialized_end=371,
)
_IMPORTMICROAPPRESPONSEWRAPPER.fields_by_name['data'].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY
DESCRIPTOR.message_types_by_name['ImportMicroAppRequest'] = _IMPORTMICROAPPREQUEST
DESCRIPTOR.message_types_by_name['ImportMicroAppResponseWrapper'] = _IMPORTMICROAPPRESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ImportMicroAppRequest = _reflection.GeneratedProtocolMessageType('ImportMicroAppRequest', (_message.Message,), {
'DESCRIPTOR' : _IMPORTMICROAPPREQUEST,
'__module__' : 'import_pb2'
# @@protoc_insertion_point(class_scope:installed_micro_app.ImportMicroAppRequest)
})
_sym_db.RegisterMessage(ImportMicroAppRequest)
ImportMicroAppResponseWrapper = _reflection.GeneratedProtocolMessageType('ImportMicroAppResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _IMPORTMICROAPPRESPONSEWRAPPER,
'__module__' : 'import_pb2'
# @@protoc_insertion_point(class_scope:installed_micro_app.ImportMicroAppResponseWrapper)
})
_sym_db.RegisterMessage(ImportMicroAppResponseWrapper)
# @@protoc_insertion_point(module_scope) | micro_app_sdk/api/installed_micro_app/import_pb2.py |
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='import.proto',
package='installed_micro_app',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x0cimport.proto\x12\x13installed_micro_app\x1a\x1bgoogle/protobuf/empty.proto\"\xb7\x01\n\x15ImportMicroAppRequest\x12\r\n\x05\x61ppId\x18\x01 \x01(\t\x12\x10\n\x08homepage\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\r\n\x05owner\x18\x04 \x01(\t\x12\x16\n\x0estoryboardJson\x18\x05 \x01(\t\x12\x15\n\rinstallStatus\x18\x06 \x01(\t\x12\x10\n\x08internal\x18\x07 \x01(\t\x12\x0f\n\x07private\x18\x08 \x01(\t\x12\x0e\n\x06status\x18\t \x01(\t\"w\n\x1dImportMicroAppResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12$\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x16.google.protobuf.Emptyb\x06proto3')
,
dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_IMPORTMICROAPPREQUEST = _descriptor.Descriptor(
name='ImportMicroAppRequest',
full_name='installed_micro_app.ImportMicroAppRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='appId', full_name='installed_micro_app.ImportMicroAppRequest.appId', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='homepage', full_name='installed_micro_app.ImportMicroAppRequest.homepage', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='installed_micro_app.ImportMicroAppRequest.name', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='owner', full_name='installed_micro_app.ImportMicroAppRequest.owner', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='storyboardJson', full_name='installed_micro_app.ImportMicroAppRequest.storyboardJson', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='installStatus', full_name='installed_micro_app.ImportMicroAppRequest.installStatus', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='internal', full_name='installed_micro_app.ImportMicroAppRequest.internal', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='private', full_name='installed_micro_app.ImportMicroAppRequest.private', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='status', full_name='installed_micro_app.ImportMicroAppRequest.status', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=67,
serialized_end=250,
)
_IMPORTMICROAPPRESPONSEWRAPPER = _descriptor.Descriptor(
name='ImportMicroAppResponseWrapper',
full_name='installed_micro_app.ImportMicroAppResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='installed_micro_app.ImportMicroAppResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='installed_micro_app.ImportMicroAppResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='installed_micro_app.ImportMicroAppResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='installed_micro_app.ImportMicroAppResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=252,
serialized_end=371,
)
_IMPORTMICROAPPRESPONSEWRAPPER.fields_by_name['data'].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY
DESCRIPTOR.message_types_by_name['ImportMicroAppRequest'] = _IMPORTMICROAPPREQUEST
DESCRIPTOR.message_types_by_name['ImportMicroAppResponseWrapper'] = _IMPORTMICROAPPRESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ImportMicroAppRequest = _reflection.GeneratedProtocolMessageType('ImportMicroAppRequest', (_message.Message,), {
'DESCRIPTOR' : _IMPORTMICROAPPREQUEST,
'__module__' : 'import_pb2'
# @@protoc_insertion_point(class_scope:installed_micro_app.ImportMicroAppRequest)
})
_sym_db.RegisterMessage(ImportMicroAppRequest)
ImportMicroAppResponseWrapper = _reflection.GeneratedProtocolMessageType('ImportMicroAppResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _IMPORTMICROAPPRESPONSEWRAPPER,
'__module__' : 'import_pb2'
# @@protoc_insertion_point(class_scope:installed_micro_app.ImportMicroAppResponseWrapper)
})
_sym_db.RegisterMessage(ImportMicroAppResponseWrapper)
# @@protoc_insertion_point(module_scope) | 0.218419 | 0.101947 |
"""Util class for job-related operations."""
from __future__ import print_function
import contextlib
import os
import time
from oslo_utils import uuidutils
from taskflow import engines
from taskflow import states
from taskflow.persistence import logbook
from artman.pipelines import pipeline_factory
from artman.utils import backend_helper
from artman.utils.logger import logger
# TODO(cbao): Include machine name
POSTER_NAME = "poster-%s" % os.getpid()
def post_remote_pipeline_job_and_wait(pipeline, jobboard_name):
"""Post a pipeline job and wait until it is finished."""
my_name = POSTER_NAME
logger.info("Starting poster with name: %s" % my_name)
persist_backend = backend_helper.default_persistence_backend()
with contextlib.closing(persist_backend):
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.upgrade()
jobboard = backend_helper.get_jobboard(my_name, jobboard_name)
jobboard.connect()
with contextlib.closing(jobboard):
# Create information in the persistence backend about the
# unit of work we want to complete and the factory that
# can be called to create the tasks that the work unit needs
# to be done.
lb = logbook.LogBook("post-from-%s" % my_name)
flow_uuid = uuidutils.generate_uuid()
fd = logbook.FlowDetail("flow-of-%s" % my_name, flow_uuid)
lb.add(fd)
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.save_logbook(lb)
engines.save_factory_details(fd,
pipeline_factory.make_pipeline_flow,
[pipeline.name, True],
pipeline.kwargs,
backend=persist_backend)
# Post, and be done with it!
jb = jobboard.post("job-from-%s" % my_name, book=lb)
logger.info('Posted: %s' % jb)
# TODO(cbao): Move wait until into a seperate method.
# TODO(lukesneeringer): ...and fix the logging.
state = states.UNCLAIMED
print('Job status: %s' % state)
while state != states.COMPLETE:
if (jb.state != state):
state = jb.state
print('Job status: %s' % state)
time.sleep(1)
return jb
def fetch_job_status(jb, jobboard_name):
result = []
my_name = POSTER_NAME
persist_backend = backend_helper.default_persistence_backend()
with contextlib.closing(persist_backend):
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.upgrade()
jobboard = backend_helper.get_jobboard(my_name, jobboard_name)
jobboard.connect()
with contextlib.closing(jobboard):
with contextlib.closing(persist_backend.get_connection()) as conn:
for flow in jb.book:
flow_detail = conn.get_flow_details(flow.uuid)
result += flow_detail
return result, flow_detail | artman/utils/job_util.py | """Util class for job-related operations."""
from __future__ import print_function
import contextlib
import os
import time
from oslo_utils import uuidutils
from taskflow import engines
from taskflow import states
from taskflow.persistence import logbook
from artman.pipelines import pipeline_factory
from artman.utils import backend_helper
from artman.utils.logger import logger
# TODO(cbao): Include machine name
POSTER_NAME = "poster-%s" % os.getpid()
def post_remote_pipeline_job_and_wait(pipeline, jobboard_name):
"""Post a pipeline job and wait until it is finished."""
my_name = POSTER_NAME
logger.info("Starting poster with name: %s" % my_name)
persist_backend = backend_helper.default_persistence_backend()
with contextlib.closing(persist_backend):
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.upgrade()
jobboard = backend_helper.get_jobboard(my_name, jobboard_name)
jobboard.connect()
with contextlib.closing(jobboard):
# Create information in the persistence backend about the
# unit of work we want to complete and the factory that
# can be called to create the tasks that the work unit needs
# to be done.
lb = logbook.LogBook("post-from-%s" % my_name)
flow_uuid = uuidutils.generate_uuid()
fd = logbook.FlowDetail("flow-of-%s" % my_name, flow_uuid)
lb.add(fd)
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.save_logbook(lb)
engines.save_factory_details(fd,
pipeline_factory.make_pipeline_flow,
[pipeline.name, True],
pipeline.kwargs,
backend=persist_backend)
# Post, and be done with it!
jb = jobboard.post("job-from-%s" % my_name, book=lb)
logger.info('Posted: %s' % jb)
# TODO(cbao): Move wait until into a seperate method.
# TODO(lukesneeringer): ...and fix the logging.
state = states.UNCLAIMED
print('Job status: %s' % state)
while state != states.COMPLETE:
if (jb.state != state):
state = jb.state
print('Job status: %s' % state)
time.sleep(1)
return jb
def fetch_job_status(jb, jobboard_name):
result = []
my_name = POSTER_NAME
persist_backend = backend_helper.default_persistence_backend()
with contextlib.closing(persist_backend):
with contextlib.closing(persist_backend.get_connection()) as conn:
conn.upgrade()
jobboard = backend_helper.get_jobboard(my_name, jobboard_name)
jobboard.connect()
with contextlib.closing(jobboard):
with contextlib.closing(persist_backend.get_connection()) as conn:
for flow in jb.book:
flow_detail = conn.get_flow_details(flow.uuid)
result += flow_detail
return result, flow_detail | 0.346762 | 0.152884 |
import torch
from utils.geometry import get_neighbourhood_indices
class Translator(torch.nn.Module):
def __init__(self, config):
super(Translator, self).__init__()
self.config = config
try:
self.n_features = config.n_features - (1 - int(config.use_count_renderer))
except:
self.n_features = config.n_features
self.output_scale = config.RENDERER.output_scale
activation = eval(config.RENDERER.activation)
self.layer_context1 = torch.nn.Sequential(torch.nn.Linear((self.config.RENDERER.kernel ** 3) * self.n_features, self.n_features),
torch.nn.LayerNorm([self.n_features], elementwise_affine=False),
activation)
self.layer1 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + self.n_features, 32),
activation)
self.layer2 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + 32 , 16),
activation)
self.layer3 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + 16, 8),
activation)
self.layer4 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + 8, self.n_features),
activation)
if self.config.RENDERER.sdf_head:
self.sdf_head = torch.nn.Sequential(torch.nn.Linear(self.n_features, 1),
torch.nn.Tanh())
if self.config.RENDERER.occ_head:
self.occ_head = torch.nn.Sequential(torch.nn.Linear(self.n_features, 1),
torch.nn.Sigmoid())
self.padding = torch.nn.ReplicationPad3d(self.config.RENDERER.kernel // 2)
self.feature_dropout = torch.nn.Dropout2d(p=0.2)
self.relu = torch.nn.LeakyReLU()
self.tanh = torch.nn.Tanh()
self.sigmoid = torch.nn.Sigmoid()
self.hardtanh = torch.nn.Hardtanh(min_val=-0.06, max_val=0.06)
self.softsign = torch.nn.Softsign()
indices = []
for i in range(-1, 2):
for j in range(-1, 2):
for k in range(-1, 2):
indices.append(torch.Tensor([i, j, k]).view(1, 1, 3))
indices = torch.cat(indices, dim=0)
indices = indices.view(1, 27, 3)
self.index_shift = indices.int().clone()
# compute interpolation shift
indices = []
for i in range(0, 2):
for j in range(0, 2):
for k in range(0, 2):
indices.append(torch.Tensor([i, j, k]).view(1, 1, 3))
indices = torch.cat(indices, dim=0)
indices = indices.view(1, 8, 3)
self.interpolation_shift = indices.int().clone()
def forward(self, points, grid, padding=True):
if padding:
grid = self.padding(grid)
points = points + int(self.config.RENDERER.kernel // 2)
else:
pass
n_points = points.shape[0]
indices = points.floor()
df = 2. * (points - (indices + 0.5))
neighbourhood = get_neighbourhood_indices(indices, size=(self.config.RENDERER.kernel, self.config.RENDERER.kernel,
self.config.RENDERER.kernel))
neighbourhood = neighbourhood.unsqueeze_(0)
n_neighbourhood = neighbourhood.shape[1]
indices = indices.unsqueeze_(1)
indices_neighbourhood = indices + neighbourhood
indices = indices.long()
indices_neighbourhood = indices_neighbourhood.long()
indices_neighbourhood = indices_neighbourhood.view(n_points * n_neighbourhood, 3)
indices = indices.squeeze_(1)
features = grid[:, :,
indices_neighbourhood[:, 0],
indices_neighbourhood[:, 1],
indices_neighbourhood[:, 2]]
center_features = grid[:, :, indices[:, 0], indices[:, 1], indices[:, 2]]
features = features.permute(-1, 1, 0)
center_features = center_features.permute(-1, 1, 0)
try:
if not self.config.use_count_renderer:
features = features[:, :-1, :]
center_features = center_features[:, :-1, :]
else:
neighbourhood_count = features[:, -1, :].unsqueeze_(1)
center_count = center_features[:, -1, :].unsqueeze_(1)
max_count_neighbourhood = torch.max(neighbourhood_count)
max_count_center = torch.max(center_count)
max_count = torch.max(max_count_neighbourhood, max_count_center) + 1.e-09
features = torch.cat([features[:, :-1, :],
neighbourhood_count/max_count], dim=1)
center_features = torch.cat([center_features[:, :-1, :],
center_count/max_count], dim=1)
except:
pass
features = features.contiguous().view(n_points, n_neighbourhood * self.n_features)
center_features = center_features.squeeze_(-1)
if self.config.minimal_gpu:
df = df.to(self.config.device)
features = features.to(self.config.device)
center_features = center_features.to(self.config.device)
center_features = center_features.unsqueeze_(-1).unsqueeze_(-1)
center_features = self.feature_dropout(center_features)
center_features = center_features.squeeze_(-1).squeeze_(-1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
context_features = self.layer_context1(features)
input_features = torch.cat([center_features, context_features], dim=1)
features = self.layer1(input_features)
features = torch.cat([center_features, features], dim=1)
#features = features.unsqueeze_(-1).unsqueeze_(-1)
#features = self.feature_dropout(features)
#features = features.squeeze_(-1).squeeze_(-1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
features = self.layer2(features)
features = torch.cat([center_features, features], dim=1)
#features = features.unsqueeze_(-1).unsqueeze_(-1)
#features = self.feature_dropout(features)
#features = features.squeeze_(-1).squeeze_(-1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
features = self.layer3(features)
features = torch.cat([center_features, features], dim=1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
features = self.layer4(features)
output = []
sdf = self.output_scale * self.sdf_head(features)
output.append(sdf)
if self.config.RENDERER.occ_head:
occ = self.occ_head(features)
output.append(occ)
del features, context_features, center_features, df, \
indices, indices_neighbourhood, neighbourhood, \
points, sdf
return torch.cat(output, dim=1) | pipeline/translator.py | import torch
from utils.geometry import get_neighbourhood_indices
class Translator(torch.nn.Module):
def __init__(self, config):
super(Translator, self).__init__()
self.config = config
try:
self.n_features = config.n_features - (1 - int(config.use_count_renderer))
except:
self.n_features = config.n_features
self.output_scale = config.RENDERER.output_scale
activation = eval(config.RENDERER.activation)
self.layer_context1 = torch.nn.Sequential(torch.nn.Linear((self.config.RENDERER.kernel ** 3) * self.n_features, self.n_features),
torch.nn.LayerNorm([self.n_features], elementwise_affine=False),
activation)
self.layer1 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + self.n_features, 32),
activation)
self.layer2 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + 32 , 16),
activation)
self.layer3 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + 16, 8),
activation)
self.layer4 = torch.nn.Sequential(
torch.nn.Linear(self.n_features + 8, self.n_features),
activation)
if self.config.RENDERER.sdf_head:
self.sdf_head = torch.nn.Sequential(torch.nn.Linear(self.n_features, 1),
torch.nn.Tanh())
if self.config.RENDERER.occ_head:
self.occ_head = torch.nn.Sequential(torch.nn.Linear(self.n_features, 1),
torch.nn.Sigmoid())
self.padding = torch.nn.ReplicationPad3d(self.config.RENDERER.kernel // 2)
self.feature_dropout = torch.nn.Dropout2d(p=0.2)
self.relu = torch.nn.LeakyReLU()
self.tanh = torch.nn.Tanh()
self.sigmoid = torch.nn.Sigmoid()
self.hardtanh = torch.nn.Hardtanh(min_val=-0.06, max_val=0.06)
self.softsign = torch.nn.Softsign()
indices = []
for i in range(-1, 2):
for j in range(-1, 2):
for k in range(-1, 2):
indices.append(torch.Tensor([i, j, k]).view(1, 1, 3))
indices = torch.cat(indices, dim=0)
indices = indices.view(1, 27, 3)
self.index_shift = indices.int().clone()
# compute interpolation shift
indices = []
for i in range(0, 2):
for j in range(0, 2):
for k in range(0, 2):
indices.append(torch.Tensor([i, j, k]).view(1, 1, 3))
indices = torch.cat(indices, dim=0)
indices = indices.view(1, 8, 3)
self.interpolation_shift = indices.int().clone()
def forward(self, points, grid, padding=True):
if padding:
grid = self.padding(grid)
points = points + int(self.config.RENDERER.kernel // 2)
else:
pass
n_points = points.shape[0]
indices = points.floor()
df = 2. * (points - (indices + 0.5))
neighbourhood = get_neighbourhood_indices(indices, size=(self.config.RENDERER.kernel, self.config.RENDERER.kernel,
self.config.RENDERER.kernel))
neighbourhood = neighbourhood.unsqueeze_(0)
n_neighbourhood = neighbourhood.shape[1]
indices = indices.unsqueeze_(1)
indices_neighbourhood = indices + neighbourhood
indices = indices.long()
indices_neighbourhood = indices_neighbourhood.long()
indices_neighbourhood = indices_neighbourhood.view(n_points * n_neighbourhood, 3)
indices = indices.squeeze_(1)
features = grid[:, :,
indices_neighbourhood[:, 0],
indices_neighbourhood[:, 1],
indices_neighbourhood[:, 2]]
center_features = grid[:, :, indices[:, 0], indices[:, 1], indices[:, 2]]
features = features.permute(-1, 1, 0)
center_features = center_features.permute(-1, 1, 0)
try:
if not self.config.use_count_renderer:
features = features[:, :-1, :]
center_features = center_features[:, :-1, :]
else:
neighbourhood_count = features[:, -1, :].unsqueeze_(1)
center_count = center_features[:, -1, :].unsqueeze_(1)
max_count_neighbourhood = torch.max(neighbourhood_count)
max_count_center = torch.max(center_count)
max_count = torch.max(max_count_neighbourhood, max_count_center) + 1.e-09
features = torch.cat([features[:, :-1, :],
neighbourhood_count/max_count], dim=1)
center_features = torch.cat([center_features[:, :-1, :],
center_count/max_count], dim=1)
except:
pass
features = features.contiguous().view(n_points, n_neighbourhood * self.n_features)
center_features = center_features.squeeze_(-1)
if self.config.minimal_gpu:
df = df.to(self.config.device)
features = features.to(self.config.device)
center_features = center_features.to(self.config.device)
center_features = center_features.unsqueeze_(-1).unsqueeze_(-1)
center_features = self.feature_dropout(center_features)
center_features = center_features.squeeze_(-1).squeeze_(-1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
context_features = self.layer_context1(features)
input_features = torch.cat([center_features, context_features], dim=1)
features = self.layer1(input_features)
features = torch.cat([center_features, features], dim=1)
#features = features.unsqueeze_(-1).unsqueeze_(-1)
#features = self.feature_dropout(features)
#features = features.squeeze_(-1).squeeze_(-1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
features = self.layer2(features)
features = torch.cat([center_features, features], dim=1)
#features = features.unsqueeze_(-1).unsqueeze_(-1)
#features = self.feature_dropout(features)
#features = features.squeeze_(-1).squeeze_(-1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
features = self.layer3(features)
features = torch.cat([center_features, features], dim=1)
if self.config.RENDERER.superresolve:
features = torch.cat([df, features], dim=1)
features = self.layer4(features)
output = []
sdf = self.output_scale * self.sdf_head(features)
output.append(sdf)
if self.config.RENDERER.occ_head:
occ = self.occ_head(features)
output.append(occ)
del features, context_features, center_features, df, \
indices, indices_neighbourhood, neighbourhood, \
points, sdf
return torch.cat(output, dim=1) | 0.723114 | 0.381421 |
from fpl_reader.pseudo_object import PseudoObject
from fpl_reader.cool_io import CoolIO
from fpl_reader.windows_time import get_time_from_ticks
class Playlist:
def __init__(self, tracks):
self.tracks = tracks
def __repr__(self):
return (
'Playlist([\n'
+ ',\n\n'.join(repr(track) for track in self.tracks)
+ '\n])')
class Track(PseudoObject):
def __init__(self):
super(Track, self).__init__()
self.flags = None
self.subsong_index = None
self.file_name = None
self.file_size = None
self.file_time = None
self.duration = None
self.rpg_album = None
self.rpg_track = None
self.rpk_album = None
self.rpk_track = None
self.primary_keys = {}
self.secondary_keys = {}
def read_track(meta_io, index_io):
track = Track()
track.flags = index_io.read_s32_le()
file_name_offset = index_io.read_u32_le()
track.file_name = meta_io.seek(file_name_offset).read_to_zero()
track.subsong_index = index_io.read_u32_le()
if track.flags & 1 == 0:
# e.g. stream that was never played, so it has no meta
return track
track.file_size = index_io.read_s64_le()
track.file_time = get_time_from_ticks(index_io.read_u64_le())
track.duration = index_io.read_f64()
track.rpg_album = index_io.read_f32()
track.rpg_track = index_io.read_f32()
track.rpk_album = index_io.read_f32()
track.rpk_track = index_io.read_f32()
entry_count = index_io.read_u32_le()
entries = [index_io.read_u32_le() for _ in range(entry_count)]
primary_key_count = entries.pop(0)
secondary_key_count = entries.pop(0)
secondary_keys_offset = entries.pop(0)
primary_key_name_offsets = {}
for _ in range(primary_key_count):
key_name_id = entries.pop(0)
key_name_offset = entries.pop(0)
primary_key_name_offsets[key_name_id] = key_name_offset
entries.pop(0) # unk0
primary_key_value_offsets = [
entries.pop(0)
for _ in range(primary_key_count)
]
track.primary_keys = {}
last_key_offset = None
for i in range(primary_key_count):
# foobar2000's properties window duplicates and concatenates the value
# when discontiguous keys are detected; we do not.
last_key_offset = primary_key_name_offsets.get(i, last_key_offset)
value_offset = primary_key_value_offsets[i]
if last_key_offset is None:
raise RuntimeError('Missing first primary key, now what?')
key = meta_io.seek(last_key_offset).read_to_zero()
value = meta_io.seek(value_offset).read_to_zero()
track.primary_keys[key] = value
assert primary_key_count * 3 + 1 <= secondary_keys_offset
for _ in range(secondary_keys_offset - (primary_key_count * 3 + 1)):
entries.pop(0)
track.secondary_keys = {}
for _ in range(secondary_key_count):
key_offset = entries.pop(0)
value_offset = entries.pop(0)
key = meta_io.seek(key_offset).read_to_zero()
value = meta_io.seek(value_offset).read_to_zero()
track.secondary_keys[key] = value
if track.flags & 0x04:
_padding = index_io.read(64)
return track
def read_playlist(data):
magic = b'\xE1\xA0\x9C\x91\xF8\x3C\x77\x42\x85\x2C\x3B\xCC\x14\x01\xD3\xF2'
tracks = []
with CoolIO(data) as handle:
if handle.read(len(magic)) != magic:
raise RuntimeError('Not a FPL file')
meta_size = handle.read_u32_le()
meta = handle.read(meta_size)
track_count = handle.read_u32_le()
with CoolIO(meta) as meta_io, CoolIO(handle.read_to_eof()) as index_io:
for track_no in range(track_count):
track = read_track(meta_io, index_io)
tracks.append(track)
return Playlist(tracks) | fpl_reader/playlist_reader.py | from fpl_reader.pseudo_object import PseudoObject
from fpl_reader.cool_io import CoolIO
from fpl_reader.windows_time import get_time_from_ticks
class Playlist:
def __init__(self, tracks):
self.tracks = tracks
def __repr__(self):
return (
'Playlist([\n'
+ ',\n\n'.join(repr(track) for track in self.tracks)
+ '\n])')
class Track(PseudoObject):
def __init__(self):
super(Track, self).__init__()
self.flags = None
self.subsong_index = None
self.file_name = None
self.file_size = None
self.file_time = None
self.duration = None
self.rpg_album = None
self.rpg_track = None
self.rpk_album = None
self.rpk_track = None
self.primary_keys = {}
self.secondary_keys = {}
def read_track(meta_io, index_io):
track = Track()
track.flags = index_io.read_s32_le()
file_name_offset = index_io.read_u32_le()
track.file_name = meta_io.seek(file_name_offset).read_to_zero()
track.subsong_index = index_io.read_u32_le()
if track.flags & 1 == 0:
# e.g. stream that was never played, so it has no meta
return track
track.file_size = index_io.read_s64_le()
track.file_time = get_time_from_ticks(index_io.read_u64_le())
track.duration = index_io.read_f64()
track.rpg_album = index_io.read_f32()
track.rpg_track = index_io.read_f32()
track.rpk_album = index_io.read_f32()
track.rpk_track = index_io.read_f32()
entry_count = index_io.read_u32_le()
entries = [index_io.read_u32_le() for _ in range(entry_count)]
primary_key_count = entries.pop(0)
secondary_key_count = entries.pop(0)
secondary_keys_offset = entries.pop(0)
primary_key_name_offsets = {}
for _ in range(primary_key_count):
key_name_id = entries.pop(0)
key_name_offset = entries.pop(0)
primary_key_name_offsets[key_name_id] = key_name_offset
entries.pop(0) # unk0
primary_key_value_offsets = [
entries.pop(0)
for _ in range(primary_key_count)
]
track.primary_keys = {}
last_key_offset = None
for i in range(primary_key_count):
# foobar2000's properties window duplicates and concatenates the value
# when discontiguous keys are detected; we do not.
last_key_offset = primary_key_name_offsets.get(i, last_key_offset)
value_offset = primary_key_value_offsets[i]
if last_key_offset is None:
raise RuntimeError('Missing first primary key, now what?')
key = meta_io.seek(last_key_offset).read_to_zero()
value = meta_io.seek(value_offset).read_to_zero()
track.primary_keys[key] = value
assert primary_key_count * 3 + 1 <= secondary_keys_offset
for _ in range(secondary_keys_offset - (primary_key_count * 3 + 1)):
entries.pop(0)
track.secondary_keys = {}
for _ in range(secondary_key_count):
key_offset = entries.pop(0)
value_offset = entries.pop(0)
key = meta_io.seek(key_offset).read_to_zero()
value = meta_io.seek(value_offset).read_to_zero()
track.secondary_keys[key] = value
if track.flags & 0x04:
_padding = index_io.read(64)
return track
def read_playlist(data):
magic = b'\xE1\xA0\x9C\x91\xF8\x3C\x77\x42\x85\x2C\x3B\xCC\x14\x01\xD3\xF2'
tracks = []
with CoolIO(data) as handle:
if handle.read(len(magic)) != magic:
raise RuntimeError('Not a FPL file')
meta_size = handle.read_u32_le()
meta = handle.read(meta_size)
track_count = handle.read_u32_le()
with CoolIO(meta) as meta_io, CoolIO(handle.read_to_eof()) as index_io:
for track_no in range(track_count):
track = read_track(meta_io, index_io)
tracks.append(track)
return Playlist(tracks) | 0.611498 | 0.185246 |
import dicom
import SimpleITK as sitk
import numpy as np
import csv
import os
from collections import defaultdict
import cPickle as pickle
import glob
import utils
def read_pkl(path):
d = pickle.load(open(path, "rb"))
return d['pixel_data'], d['origin'], d['spacing']
def read_mhd(path):
itk_data = sitk.ReadImage(path.encode('utf-8'))
pixel_data = sitk.GetArrayFromImage(itk_data)
origin = np.array(list(reversed(itk_data.GetOrigin())))
spacing = np.array(list(reversed(itk_data.GetSpacing())))
return pixel_data, origin, spacing
def world2voxel(world_coord, origin, spacing):
stretched_voxel_coord = np.absolute(world_coord - origin)
voxel_coord = stretched_voxel_coord / spacing
return voxel_coord
def read_dicom(path):
d = dicom.read_file(path)
metadata = {}
for attr in dir(d):
if attr[0].isupper() and attr != 'PixelData':
try:
metadata[attr] = getattr(d, attr)
except AttributeError:
pass
metadata['InstanceNumber'] = int(metadata['InstanceNumber'])
metadata['PixelSpacing'] = np.float32(metadata['PixelSpacing'])
metadata['ImageOrientationPatient'] = np.float32(metadata['ImageOrientationPatient'])
try:
metadata['SliceLocation'] = np.float32(metadata['SliceLocation'])
except:
metadata['SliceLocation'] = None
metadata['ImagePositionPatient'] = np.float32(metadata['ImagePositionPatient'])
metadata['Rows'] = int(metadata['Rows'])
metadata['Columns'] = int(metadata['Columns'])
metadata['RescaleSlope'] = float(metadata['RescaleSlope'])
metadata['RescaleIntercept'] = float(metadata['RescaleIntercept'])
return np.array(d.pixel_array), metadata
def extract_pid_dir(patient_data_path):
return patient_data_path.split('/')[-1]
def extract_pid_filename(file_path, replace_str='.mhd'):
return os.path.basename(file_path).replace(replace_str, '').replace('.pkl', '')
def get_candidates_paths(path):
id2candidates_path = {}
file_paths = sorted(glob.glob(path + '/*.pkl'))
for p in file_paths:
pid = extract_pid_filename(p, '.pkl')
id2candidates_path[pid] = p
return id2candidates_path
def get_patient_data(patient_data_path):
slice_paths = os.listdir(patient_data_path)
sid2data = {}
sid2metadata = {}
for s in slice_paths:
slice_id = s.split('.')[0]
data, metadata = read_dicom(patient_data_path + '/' + s)
sid2data[slice_id] = data
sid2metadata[slice_id] = metadata
return sid2data, sid2metadata
def ct2HU(x, metadata):
x = metadata['RescaleSlope'] * x + metadata['RescaleIntercept']
x[x < -1000] = -1000
return x
def read_dicom_scan(patient_data_path):
sid2data, sid2metadata = get_patient_data(patient_data_path)
sid2position = {}
for sid in sid2data.keys():
sid2position[sid] = get_slice_position(sid2metadata[sid])
sids_sorted = sorted(sid2position.items(), key=lambda x: x[1])
sids_sorted = [s[0] for s in sids_sorted]
z_pixel_spacing = []
for s1, s2 in zip(sids_sorted[1:], sids_sorted[:-1]):
z_pixel_spacing.append(sid2position[s1] - sid2position[s2])
z_pixel_spacing = np.array(z_pixel_spacing)
try:
assert np.all((z_pixel_spacing - z_pixel_spacing[0]) < 0.01)
except:
print 'This patient has multiple series, we will remove one'
sids_sorted_2 = []
for s1, s2 in zip(sids_sorted[::2], sids_sorted[1::2]):
if sid2metadata[s1]["InstanceNumber"] > sid2metadata[s2]["InstanceNumber"]:
sids_sorted_2.append(s1)
else:
sids_sorted_2.append(s2)
sids_sorted = sids_sorted_2
z_pixel_spacing = []
for s1, s2 in zip(sids_sorted[1:], sids_sorted[:-1]):
z_pixel_spacing.append(sid2position[s1] - sid2position[s2])
z_pixel_spacing = np.array(z_pixel_spacing)
assert np.all((z_pixel_spacing - z_pixel_spacing[0]) < 0.01)
pixel_spacing = np.array((z_pixel_spacing[0],
sid2metadata[sids_sorted[0]]['PixelSpacing'][0],
sid2metadata[sids_sorted[0]]['PixelSpacing'][1]))
img = np.stack([ct2HU(sid2data[sid], sid2metadata[sid]) for sid in sids_sorted])
return img, pixel_spacing
def sort_slices_position(patient_data):
return sorted(patient_data, key=lambda x: get_slice_position(x['metadata']))
def sort_sids_by_position(sid2metadata):
return sorted(sid2metadata.keys(), key=lambda x: get_slice_position(sid2metadata[x]))
def sort_slices_jonas(sid2metadata):
sid2position = slice_location_finder(sid2metadata)
return sorted(sid2metadata.keys(), key=lambda x: sid2position[x])
def get_slice_position(slice_metadata):
"""
https://www.kaggle.com/rmchamberlain/data-science-bowl-2017/dicom-to-3d-numpy-arrays
"""
orientation = tuple((o for o in slice_metadata['ImageOrientationPatient']))
position = tuple((p for p in slice_metadata['ImagePositionPatient']))
rowvec, colvec = orientation[:3], orientation[3:]
normal_vector = np.cross(rowvec, colvec)
slice_pos = np.dot(position, normal_vector)
return slice_pos
def slice_location_finder(sid2metadata):
"""
:param slicepath2metadata: dict with arbitrary keys, and metadata values
:return:
"""
sid2midpix = {}
sid2position = {}
for sid in sid2metadata:
metadata = sid2metadata[sid]
image_orientation = metadata["ImageOrientationPatient"]
image_position = metadata["ImagePositionPatient"]
pixel_spacing = metadata["PixelSpacing"]
rows = metadata['Rows']
columns = metadata['Columns']
# calculate value of middle pixel
F = np.array(image_orientation).reshape((2, 3))
# reversed order, as per http://nipy.org/nibabel/dicom/dicom_orientation.html
i, j = columns / 2.0, rows / 2.0
im_pos = np.array([[i * pixel_spacing[0], j * pixel_spacing[1]]], dtype='float32')
pos = np.array(image_position).reshape((1, 3))
position = np.dot(im_pos, F) + pos
sid2midpix[sid] = position[0, :]
if len(sid2midpix) <= 1:
for sp, midpix in sid2midpix.iteritems():
sid2position[sp] = 0.
else:
# find the keys of the 2 points furthest away from each other
max_dist = -1.0
max_dist_keys = []
for sp1, midpix1 in sid2midpix.iteritems():
for sp2, midpix2 in sid2midpix.iteritems():
if sp1 == sp2:
continue
distance = np.sqrt(np.sum((midpix1 - midpix2) ** 2))
if distance > max_dist:
max_dist_keys = [sp1, sp2]
max_dist = distance
# project the others on the line between these 2 points
# sort the keys, so the order is more or less the same as they were
# max_dist_keys.sort(key=lambda x: int(re.search(r'/sax_(\d+)\.pkl$', x).group(1)))
p_ref1 = sid2midpix[max_dist_keys[0]]
p_ref2 = sid2midpix[max_dist_keys[1]]
v1 = p_ref2 - p_ref1
v1 /= np.linalg.norm(v1)
for sp, midpix in sid2midpix.iteritems():
v2 = midpix - p_ref1
sid2position[sp] = np.inner(v1, v2)
return sid2position
def get_patient_data_paths(data_dir):
pids = sorted(os.listdir(data_dir))
return [data_dir + '/' + p for p in pids]
def read_patient_annotations_luna(pid, directory):
return pickle.load(open(os.path.join(directory,pid+'.pkl'),"rb"))
def read_labels(file_path):
id2labels = {}
train_csv = open(file_path)
lines = train_csv.readlines()
i = 0
for item in lines:
if i == 0:
i = 1
continue
id, label = item.replace('\n', '').split(',')
id2labels[id] = int(label)
return id2labels
def read_test_labels(file_path):
id2labels = {}
train_csv = open(file_path)
lines = train_csv.readlines()
i = 0
for item in lines:
if i == 0:
i = 1
continue
id, label = item.replace('\n', '').split(';')
id2labels[id] = int(label)
return id2labels
def read_luna_annotations(file_path):
id2xyzd = defaultdict(list)
train_csv = open(file_path)
lines = train_csv.readlines()
i = 0
for item in lines:
if i == 0:
i = 1
continue
id, x, y, z, d = item.replace('\n', '').split(',')
id2xyzd[id].append([float(z), float(y), float(x), float(d)])
return id2xyzd
def read_luna_negative_candidates(file_path):
id2xyzd = defaultdict(list)
train_csv = open(file_path)
lines = train_csv.readlines()
i = 0
for item in lines:
if i == 0:
i = 1
continue
id, x, y, z, d = item.replace('\n', '').split(',')
if float(d) == 0:
id2xyzd[id].append([float(z), float(y), float(x), float(d)])
return id2xyzd
def write_submission(pid2prediction, submission_path):
"""
:param pid2prediction: dict of {patient_id: label}
:param submission_path:
"""
f = open(submission_path, 'w+')
fo = csv.writer(f, lineterminator='\n')
fo.writerow(['id', 'cancer'])
for pid in pid2prediction.keys():
fo.writerow([pid, pid2prediction[pid]])
f.close()
def filter_close_neighbors(candidates, min_dist=16):
#TODO pixelspacing should be added , it is now hardcoded
candidates_wo_dupes = set()
no_pairs = 0
for can1 in candidates:
found_close_candidate = False
swap_candidate = None
for can2 in candidates_wo_dupes:
if (can1 == can2).all():
raise "Candidate should not be in the target array yet"
else:
delta = can1[:3] - can2[:3]
delta[0] = 2.5*delta[0] #zyx coos
dist = np.sum(delta**2)**(1./2)
if dist<min_dist:
no_pairs += 1
print 'Warning: there is a pair nodules close together', can1[:3], can2[:3]
found_close_candidate = True
if can1[4]>can2[4]:
swap_candidate = can2
break
if not found_close_candidate:
candidates_wo_dupes.add(tuple(can1))
elif swap_candidate:
candidates_wo_dupes.remove(swap_candidate)
candidates_wo_dupes.add(tuple(can1))
print 'n candidates filtered out', no_pairs
return candidates_wo_dupes
def dice_index(predictions, targets, epsilon=1e-12):
predictions = np.asarray(predictions).flatten()
targets = np.asarray(targets).flatten()
dice = (2. * np.sum(targets * predictions) + epsilon) / (np.sum(predictions) + np.sum(targets) + epsilon)
return dice
def cross_entropy(predictions, targets, epsilon=1e-12):
predictions = np.asarray(predictions).flatten()
predictions = np.clip(predictions, epsilon, 1. - epsilon)
targets = np.asarray(targets).flatten()
ce = np.mean(np.log(predictions) * targets + np.log(1 - predictions) * (1. - targets))
return ce
def get_generated_pids(predictions_dir):
    """Patient ids of prediction files already present in ``predictions_dir``.

    Returns an empty list when the directory does not exist.
    """
    if not os.path.isdir(predictions_dir):
        return []
    return [extract_pid_filename(name) for name in os.listdir(predictions_dir)]
def evaluate_log_loss(pid2prediction, pid2label):
    """Log loss of predictions against labels, matched by patient id.

    :raises AssertionError: if the two dicts do not share the same key set
    """
    predictions, labels = [], []
    assert set(pid2prediction.keys()) == set(pid2label.keys())
    # .items() instead of the Python-2-only .iteritems() so this helper
    # also runs under Python 3; behavior is identical.
    for pid, prediction in pid2prediction.items():
        predictions.append(prediction)
        labels.append(pid2label[pid])
    return log_loss(labels, predictions)
def log_loss(y_real, y_pred, eps=1e-15):
    """Binary cross-entropy (negative mean log-likelihood).

    Predictions are clipped to [eps, 1 - eps] so log() stays finite.
    """
    clipped = np.clip(y_pred, eps, 1 - eps)
    truth = np.array(y_real)
    likelihood = truth * np.log(clipped) + (1 - truth) * np.log(1 - clipped)
    return -np.average(likelihood)
def read_luna_properties(file_path):
    """Parse an extended LUNA annotations CSV (with header row).

    Each data row is ``id,x,y,z,d`` followed by nine radiologist property
    scores; builds {seriesuid: list of [z, y, x, diameter, properties_dict]}.
    """
    id2xyzp = defaultdict(list)
    train_csv = open(file_path)
    lines = train_csv.readlines()
    i = 0
    for item in lines:
        if i == 0:
            i = 1  # skip the CSV header row
            continue
        annotation = item.replace('\n', '').split(',')
        id = annotation[0]
        x = float(annotation[1])
        y = float(annotation[2])
        z = float(annotation[3])
        d = float(annotation[4])
        properties_dict = {
            'diameter': d,
            'calcification': float(annotation[5]),
            'internalStructure': float(annotation[6]),
            'lobulation': float(annotation[7]),
            'malignancy': float(annotation[8]),
            'margin': float(annotation[9]),
            'sphericity': float(annotation[10]),
            'spiculation': float(annotation[11]),
            'subtlety': float(annotation[12]),
            'texture': float(annotation[13]),
        }
        # zyx order to match the pixel-array layout used elsewhere in this file
        id2xyzp[id].append([z, y, x, d, properties_dict])
return id2xyzp | utils_lung.py | import dicom
import SimpleITK as sitk
import numpy as np
import csv
import os
from collections import defaultdict
import cPickle as pickle
import glob
import utils
def read_pkl(path):
    """Load a pickled scan; return (pixel_data, origin, spacing).

    Uses 'with' so the file handle is closed (the original passed an
    anonymous open() to pickle.load and leaked the handle to the GC).
    """
    with open(path, "rb") as f:
        d = pickle.load(f)
    return d['pixel_data'], d['origin'], d['spacing']
def read_mhd(path):
    """Read an ITK .mhd volume; return (pixel_data, origin, spacing).

    ITK reports coordinates as xyz; origin and spacing are reversed to zyx
    to match the axis order of the returned pixel array.
    """
    itk_data = sitk.ReadImage(path.encode('utf-8'))
    pixel_data = sitk.GetArrayFromImage(itk_data)
    origin = np.array(itk_data.GetOrigin()[::-1])
    spacing = np.array(itk_data.GetSpacing()[::-1])
    return pixel_data, origin, spacing
def world2voxel(world_coord, origin, spacing):
    """Map world (mm) coordinates to (possibly fractional) voxel indices."""
    offset = np.absolute(world_coord - origin)
    return offset / spacing
def read_dicom(path):
    """Read one DICOM slice.

    :return: (pixel array, metadata dict) — metadata holds every upper-case
        non-pixel attribute of the file, with the fields used downstream
        coerced to numeric types.
    """
    d = dicom.read_file(path)
    metadata = {}
    for attr in dir(d):
        # DICOM tag attributes are CamelCase; skip the bulk pixel payload.
        if attr[0].isupper() and attr != 'PixelData':
            try:
                metadata[attr] = getattr(d, attr)
            except AttributeError:
                pass
    metadata['InstanceNumber'] = int(metadata['InstanceNumber'])
    metadata['PixelSpacing'] = np.float32(metadata['PixelSpacing'])
    metadata['ImageOrientationPatient'] = np.float32(metadata['ImageOrientationPatient'])
    # SliceLocation is optional / occasionally malformed; fall back to None.
    # (Was a bare `except:`, which would also swallow KeyboardInterrupt.)
    try:
        metadata['SliceLocation'] = np.float32(metadata['SliceLocation'])
    except (KeyError, TypeError, ValueError):
        metadata['SliceLocation'] = None
    metadata['ImagePositionPatient'] = np.float32(metadata['ImagePositionPatient'])
    metadata['Rows'] = int(metadata['Rows'])
    metadata['Columns'] = int(metadata['Columns'])
    metadata['RescaleSlope'] = float(metadata['RescaleSlope'])
    metadata['RescaleIntercept'] = float(metadata['RescaleIntercept'])
    return np.array(d.pixel_array), metadata
def extract_pid_dir(patient_data_path):
    """Patient id = last '/'-separated component of the patient directory."""
    return patient_data_path.rsplit('/', 1)[-1]
def extract_pid_filename(file_path, replace_str='.mhd'):
    """Patient id = basename with ``replace_str`` and '.pkl' stripped."""
    name = os.path.basename(file_path)
    return name.replace(replace_str, '').replace('.pkl', '')
def get_candidates_paths(path):
    """Map patient id -> candidates .pkl path for every pickle under ``path``."""
    return {extract_pid_filename(p, '.pkl'): p
            for p in sorted(glob.glob(path + '/*.pkl'))}
def get_patient_data(patient_data_path):
    """Read every DICOM slice in a patient directory.

    :return: (slice_id -> pixel data, slice_id -> metadata); the slice id is
        the filename up to its first dot.
    """
    sid2data = {}
    sid2metadata = {}
    for filename in os.listdir(patient_data_path):
        slice_id = filename.split('.')[0]
        pixels, meta = read_dicom(patient_data_path + '/' + filename)
        sid2data[slice_id] = pixels
        sid2metadata[slice_id] = meta
    return sid2data, sid2metadata
def ct2HU(x, metadata):
    """Convert raw CT values to Hounsfield units, clamping below -1000 (air)."""
    hu = metadata['RescaleSlope'] * x + metadata['RescaleIntercept']
    hu[hu < -1000] = -1000
    return hu
def read_dicom_scan(patient_data_path):
    """Load a full DICOM scan as a 3-D volume in Hounsfield units.

    Returns (img, pixel_spacing): img is the stack of slices ordered by
    projected slice position; pixel_spacing is (z, y, x).
    """
    sid2data, sid2metadata = get_patient_data(patient_data_path)
    # Order slices by their projected position along the scan axis.
    sid2position = {}
    for sid in sid2data.keys():
        sid2position[sid] = get_slice_position(sid2metadata[sid])
    sids_sorted = sorted(sid2position.items(), key=lambda x: x[1])
    sids_sorted = [s[0] for s in sids_sorted]
    # Consecutive position deltas; these should be constant for one series.
    z_pixel_spacing = []
    for s1, s2 in zip(sids_sorted[1:], sids_sorted[:-1]):
        z_pixel_spacing.append(sid2position[s1] - sid2position[s2])
    z_pixel_spacing = np.array(z_pixel_spacing)
    try:
        assert np.all((z_pixel_spacing - z_pixel_spacing[0]) < 0.01)
    except:
        # Irregular spacing: the directory holds two interleaved series.
        # Keep, from each adjacent pair, the slice with the higher
        # InstanceNumber (presumably the newer series — TODO confirm),
        # then re-check that the spacing is now uniform.
        print 'This patient has multiple series, we will remove one'
        sids_sorted_2 = []
        for s1, s2 in zip(sids_sorted[::2], sids_sorted[1::2]):
            if sid2metadata[s1]["InstanceNumber"] > sid2metadata[s2]["InstanceNumber"]:
                sids_sorted_2.append(s1)
            else:
                sids_sorted_2.append(s2)
        sids_sorted = sids_sorted_2
        z_pixel_spacing = []
        for s1, s2 in zip(sids_sorted[1:], sids_sorted[:-1]):
            z_pixel_spacing.append(sid2position[s1] - sid2position[s2])
        z_pixel_spacing = np.array(z_pixel_spacing)
        assert np.all((z_pixel_spacing - z_pixel_spacing[0]) < 0.01)
    # (z, y, x) voxel spacing; in-plane spacing taken from the first slice.
    pixel_spacing = np.array((z_pixel_spacing[0],
                              sid2metadata[sids_sorted[0]]['PixelSpacing'][0],
                              sid2metadata[sids_sorted[0]]['PixelSpacing'][1]))
    img = np.stack([ct2HU(sid2data[sid], sid2metadata[sid]) for sid in sids_sorted])
    return img, pixel_spacing
def sort_slices_position(patient_data):
    """Sort slice dicts by their projected position along the scan axis."""
    position_of = lambda entry: get_slice_position(entry['metadata'])
    return sorted(patient_data, key=position_of)
def sort_sids_by_position(sid2metadata):
    """Slice ids ordered by projected slice position."""
    return sorted(sid2metadata, key=lambda sid: get_slice_position(sid2metadata[sid]))
def sort_slices_jonas(sid2metadata):
    """Slice ids ordered by the position estimate from slice_location_finder."""
    sid2position = slice_location_finder(sid2metadata)
    return sorted(sid2position, key=sid2position.get)
def get_slice_position(slice_metadata):
    """Project ImagePositionPatient onto the slice-plane normal.

    https://www.kaggle.com/rmchamberlain/data-science-bowl-2017/dicom-to-3d-numpy-arrays
    """
    orientation = tuple(slice_metadata['ImageOrientationPatient'])
    position = tuple(slice_metadata['ImagePositionPatient'])
    row_direction = orientation[:3]
    col_direction = orientation[3:]
    # The normal of the slice plane points along the scan axis.
    normal = np.cross(row_direction, col_direction)
    return np.dot(position, normal)
def slice_location_finder(sid2metadata):
    """Estimate a scalar position for every slice.

    Each slice's mid-pixel world coordinate is computed from its DICOM
    geometry; all midpoints are then projected onto the line through the
    two most distant midpoints.

    :param sid2metadata: dict with arbitrary keys, and metadata values
    :return: dict of {slice_id: scalar position}
    """
    sid2midpix = {}
    sid2position = {}
    for sid in sid2metadata:
        metadata = sid2metadata[sid]
        image_orientation = metadata["ImageOrientationPatient"]
        image_position = metadata["ImagePositionPatient"]
        pixel_spacing = metadata["PixelSpacing"]
        rows = metadata['Rows']
        columns = metadata['Columns']
        # calculate the world coordinate of the middle pixel
        F = np.array(image_orientation).reshape((2, 3))
        # reversed order, as per http://nipy.org/nibabel/dicom/dicom_orientation.html
        i, j = columns / 2.0, rows / 2.0
        im_pos = np.array([[i * pixel_spacing[0], j * pixel_spacing[1]]], dtype='float32')
        pos = np.array(image_position).reshape((1, 3))
        position = np.dot(im_pos, F) + pos
        sid2midpix[sid] = position[0, :]
    if len(sid2midpix) <= 1:
        # a single slice (or none) has no meaningful ordering
        for sp in sid2midpix:
            sid2position[sp] = 0.
    else:
        # find the keys of the 2 midpoints furthest away from each other
        max_dist = -1.0
        max_dist_keys = []
        # .items() instead of the Python-2-only .iteritems() (Py3 compatible)
        for sp1, midpix1 in sid2midpix.items():
            for sp2, midpix2 in sid2midpix.items():
                if sp1 == sp2:
                    continue
                distance = np.sqrt(np.sum((midpix1 - midpix2) ** 2))
                if distance > max_dist:
                    max_dist_keys = [sp1, sp2]
                    max_dist = distance
        # project every midpoint onto the line between those two extremes
        p_ref1 = sid2midpix[max_dist_keys[0]]
        p_ref2 = sid2midpix[max_dist_keys[1]]
        v1 = p_ref2 - p_ref1
        v1 /= np.linalg.norm(v1)
        for sp, midpix in sid2midpix.items():
            v2 = midpix - p_ref1
            sid2position[sp] = np.inner(v1, v2)
    return sid2position
def get_patient_data_paths(data_dir):
    """Full paths of every entry under ``data_dir``, sorted by patient id."""
    return [data_dir + '/' + pid for pid in sorted(os.listdir(data_dir))]
def read_patient_annotations_luna(pid, directory):
    """Unpickle the annotations stored as ``<directory>/<pid>.pkl``.

    Uses 'with' so the file handle is closed (the original leaked it).
    """
    with open(os.path.join(directory, pid + '.pkl'), "rb") as f:
        return pickle.load(f)
def read_labels(file_path):
    """Parse an ``id,label`` CSV (with header) into {patient_id: int label}.

    Fixes vs. the original: the file is closed ('with'), the builtin ``id``
    is no longer shadowed, and the header is skipped by slicing instead of
    an i==0 flag.
    """
    id2labels = {}
    with open(file_path) as train_csv:
        for line in train_csv.readlines()[1:]:
            pid, label = line.replace('\n', '').split(',')
            id2labels[pid] = int(label)
    return id2labels
def read_test_labels(file_path):
    """Parse an ``id;label`` file (with header) into {patient_id: int label}.

    Same as read_labels but with ';' as separator; the file is closed and
    the builtin ``id`` is no longer shadowed.
    """
    id2labels = {}
    with open(file_path) as train_csv:
        for line in train_csv.readlines()[1:]:
            pid, label = line.replace('\n', '').split(';')
            id2labels[pid] = int(label)
    return id2labels
def read_luna_annotations(file_path):
    """Parse LUNA ``id,x,y,z,d`` annotations (with header).

    :return: {seriesuid: list of [z, y, x, diameter]} — axis order is
        reversed to zyx to match the pixel-array layout.
    """
    id2xyzd = defaultdict(list)
    with open(file_path) as annotations_csv:
        for line in annotations_csv.readlines()[1:]:
            pid, x, y, z, d = line.replace('\n', '').split(',')
            id2xyzd[pid].append([float(z), float(y), float(x), float(d)])
    return id2xyzd
def read_luna_negative_candidates(file_path):
    """Parse LUNA candidates CSV (with header), keeping only rows with d == 0
    (i.e. negative, non-nodule candidates).

    :return: {seriesuid: list of [z, y, x, 0.0]} in zyx order
    """
    id2xyzd = defaultdict(list)
    with open(file_path) as candidates_csv:
        for line in candidates_csv.readlines()[1:]:
            pid, x, y, z, d = line.replace('\n', '').split(',')
            if float(d) == 0:
                id2xyzd[pid].append([float(z), float(y), float(x), float(d)])
    return id2xyzd
def write_submission(pid2prediction, submission_path):
    """Write a Kaggle-style submission CSV with header ``id,cancer``.

    :param pid2prediction: dict of {patient_id: predicted label/probability}
    :param submission_path: destination path of the CSV file
    """
    # 'with' guarantees the handle is closed even if a write fails
    # (the original used open()/close() with no try/finally).
    with open(submission_path, 'w') as f:
        writer = csv.writer(f, lineterminator='\n')
        writer.writerow(['id', 'cancer'])
        for pid, prediction in pid2prediction.items():
            writer.writerow([pid, prediction])
def filter_close_neighbors(candidates, min_dist=16):
    """Greedily drop candidates that lie within ``min_dist`` (zyx voxel units,
    z scaled by 2.5) of an already-kept candidate, keeping from each close
    pair the one with the higher score (column 4).

    :param candidates: iterable of arrays shaped [z, y, x, ..., score, ...]
    :param min_dist: minimum allowed pairwise distance
    :return: set of kept candidates, as tuples
    """
    # TODO: pixel spacing should be a parameter; the z factor 2.5 is hardcoded
    candidates_wo_dupes = set()
    no_pairs = 0
    for can1 in candidates:
        found_close_candidate = False
        swap_candidate = None
        for can2 in candidates_wo_dupes:
            if (can1 == can2).all():
                # Fixed: raising a plain string is a TypeError at runtime;
                # use a real exception type instead.
                raise ValueError("Candidate should not be in the target array yet")
            delta = can1[:3] - can2[:3]
            delta[0] = 2.5 * delta[0]  # zyx coordinates: z axis has coarser spacing
            dist = np.sum(delta ** 2) ** (1. / 2)
            if dist < min_dist:
                no_pairs += 1
                print('Warning: there is a pair nodules close together %s %s'
                      % (can1[:3], can2[:3]))
                found_close_candidate = True
                if can1[4] > can2[4]:
                    swap_candidate = can2
                break
        if not found_close_candidate:
            candidates_wo_dupes.add(tuple(can1))
        elif swap_candidate:
            # the new candidate scores higher than its close neighbor: replace it
            candidates_wo_dupes.remove(swap_candidate)
            candidates_wo_dupes.add(tuple(can1))
    print('n candidates filtered out %d' % no_pairs)
    return candidates_wo_dupes
def dice_index(predictions, targets, epsilon=1e-12):
    """Soerensen-Dice overlap between two (flattened) arrays.

    ``epsilon`` stabilises the ratio when both inputs are all zeros.
    """
    p = np.asarray(predictions).flatten()
    t = np.asarray(targets).flatten()
    numerator = 2. * np.sum(t * p) + epsilon
    denominator = np.sum(p) + np.sum(t) + epsilon
    return numerator / denominator
def cross_entropy(predictions, targets, epsilon=1e-12):
    """Mean binary log-likelihood of ``predictions`` against ``targets``.

    NOTE(review): despite the name, this returns the *un-negated* mean
    log-likelihood (a value <= 0); callers appear to rely on that sign.
    Predictions are clipped to (epsilon, 1 - epsilon) to keep log() finite.
    """
    p = np.clip(np.asarray(predictions).flatten(), epsilon, 1. - epsilon)
    t = np.asarray(targets).flatten()
    return np.mean(t * np.log(p) + (1. - t) * np.log(1 - p))
def get_generated_pids(predictions_dir):
    """Patient ids of prediction files already present in ``predictions_dir``.

    Returns an empty list when the directory does not exist.
    """
    if not os.path.isdir(predictions_dir):
        return []
    return [extract_pid_filename(name) for name in os.listdir(predictions_dir)]
def evaluate_log_loss(pid2prediction, pid2label):
    """Log loss of predictions against labels, matched by patient id.

    :raises AssertionError: if the two dicts do not share the same key set
    """
    predictions, labels = [], []
    assert set(pid2prediction.keys()) == set(pid2label.keys())
    # .items() instead of the Python-2-only .iteritems() so this helper
    # also runs under Python 3; behavior is identical.
    for pid, prediction in pid2prediction.items():
        predictions.append(prediction)
        labels.append(pid2label[pid])
    return log_loss(labels, predictions)
def log_loss(y_real, y_pred, eps=1e-15):
    """Binary cross-entropy (negative mean log-likelihood).

    Predictions are clipped to [eps, 1 - eps] so log() stays finite.
    """
    clipped = np.clip(y_pred, eps, 1 - eps)
    truth = np.array(y_real)
    likelihood = truth * np.log(clipped) + (1 - truth) * np.log(1 - clipped)
    return -np.average(likelihood)
def read_luna_properties(file_path):
    """Parse an extended LUNA annotations CSV (with header row).

    Each data row is ``id,x,y,z,d`` followed by nine radiologist property
    scores; builds {seriesuid: list of [z, y, x, diameter, properties_dict]}.
    """
    id2xyzp = defaultdict(list)
    train_csv = open(file_path)
    lines = train_csv.readlines()
    i = 0
    for item in lines:
        if i == 0:
            i = 1  # skip the CSV header row
            continue
        annotation = item.replace('\n', '').split(',')
        id = annotation[0]
        x = float(annotation[1])
        y = float(annotation[2])
        z = float(annotation[3])
        d = float(annotation[4])
        properties_dict = {
            'diameter': d,
            'calcification': float(annotation[5]),
            'internalStructure': float(annotation[6]),
            'lobulation': float(annotation[7]),
            'malignancy': float(annotation[8]),
            'margin': float(annotation[9]),
            'sphericity': float(annotation[10]),
            'spiculation': float(annotation[11]),
            'subtlety': float(annotation[12]),
            'texture': float(annotation[13]),
        }
        # zyx order to match the pixel-array layout used elsewhere in this file
        id2xyzp[id].append([z, y, x, d, properties_dict])
return id2xyzp | 0.430866 | 0.254277 |
import sys
from pathlib import Path, PurePath
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import Qt, QObject, QSettings, QDir
from PyQt5.QtWidgets import (QApplication, QDialog, QGridLayout, QLabel, QLineEdit,
QPushButton, QFileDialog, QWidget, QGroupBox, QVBoxLayout,
QDialogButtonBox, QSizePolicy)
class IPProject:
    """Holds the directory layout and QSettings for one InfraView project."""

    basePath = None               # base path projects are saved in
    projectPath = None            # path of actual project
    dataPath = None               # where locally stored data (can) be saved
    # NOTE(review): attribute keeps the original 'Resuts' typo because
    # external code may read it directly.
    beamformingResutsPath = None  # path to csv files holding fstat, ba, and tracev data
    detectionsPath = None         # where picks will be saved
    customFilterPath = None       # where custom filters will be saved
    homePath = None               # user's home directory
    stationsPath = None           # where station xml files will be saved
    projectName = None
    projectFileName = None

    def __init__(self):
        # application-wide settings (remembers e.g. the last used directory)
        self.__globalSettings = QSettings('LANL', 'InfraView')

    def makeNewProject(self):
        """Ask the user for a name/location, create the directory tree and the
        .ipprj settings file.  Returns True on success, False if cancelled."""
        newDialog = IPNewProjectDialog(self)
        if not newDialog.exec_():
            return False
        self.basePath, self.projectName = newDialog.getBasePathAndProjectName()
        # pathlib's '/' operator instead of string concatenation
        self.projectPath = Path(self.basePath) / self.projectName
        self.dataPath = self.projectPath / 'data'
        self.detectionsPath = self.projectPath / 'detections'
        self.stationsPath = self.projectPath / 'stations'
        self.customFilterPath = self.projectPath / 'customFilters'
        self.beamformingResutsPath = self.projectPath / 'beamformingResults'
        # Create the project directories
        for directory in (self.projectPath, self.dataPath, self.detectionsPath,
                          self.stationsPath, self.customFilterPath,
                          self.beamformingResutsPath):
            directory.mkdir(parents=True, exist_ok=True)
        # Create a settings object/file for the new project and record the layout
        self.projectFileName = self.projectName + '.ipprj'
        self.projectSettings = QSettings(str(self.projectPath / self.projectFileName), QSettings.IniFormat)
        self.projectSettings.beginGroup('Main')
        self.projectSettings.setValue('projectName', str(self.projectName))
        self.projectSettings.endGroup()
        self.projectSettings.beginGroup('PathNames')
        self.projectSettings.setValue('basePathName', str(self.basePath))
        self.projectSettings.setValue('projectPathName', str(self.projectPath))
        self.projectSettings.setValue('dataPathName', str(self.dataPath))
        self.projectSettings.setValue('detectionsPathName', str(self.detectionsPath))
        self.projectSettings.setValue('stationsPathName', str(self.stationsPath))
        self.projectSettings.setValue('customFilterPathName', str(self.customFilterPath))
        self.projectSettings.setValue('beamformingResultsPath', str(self.beamformingResutsPath))
        self.projectSettings.endGroup()
        return True

    def loadProject(self):
        """Open a user-chosen .ipprj file and restore the project paths.
        Returns True if a file was chosen and loaded, else False."""
        mydirectory = self.__globalSettings.value('last_baseProject_directory', self.homePath)
        ipprjPathname, _ = QFileDialog.getOpenFileName(
            caption='Open InfraView Project', directory=mydirectory, filter='InfraView Project Files (*.ipprj)')
        if not ipprjPathname:
            return False
        self.projectSettings = QSettings(ipprjPathname, QSettings.IniFormat)
        self.projectSettings.beginGroup('Main')
        self.projectName = self.projectSettings.value('projectName')
        self.projectFileName = self.projectName + '.ipprj'
        self.projectSettings.endGroup()
        self.projectSettings.beginGroup('PathNames')
        self.basePath = Path(self.projectSettings.value('basePathName'))
        self.projectPath = Path(self.projectSettings.value('projectPathName'))
        self.dataPath = Path(self.projectSettings.value('dataPathName'))
        self.detectionsPath = Path(self.projectSettings.value('detectionsPathName'))
        self.stationsPath = Path(self.projectSettings.value('stationsPathName'))
        self.customFilterPath = Path(self.projectSettings.value('customFilterPathName'))
        # when opening old projects, newer settings might not be present
        beamPath = self.projectSettings.value('beamformingResultsPath')
        if beamPath is None:
            self.beamformingResutsPath = self.projectPath / 'beamformingResults'
        else:
            self.beamformingResutsPath = Path(beamPath)
        self.projectSettings.endGroup()
        return True

    # --- simple accessors, kept for existing callers -----------------------

    def get_basePath(self):
        return self.basePath

    def get_projectPath(self):
        return self.projectPath

    def get_dataPath(self):
        return self.dataPath

    def set_dataPath(self, path):
        self.dataPath = path

    def get_detectionsPath(self):
        return self.detectionsPath

    def get_stationsPath(self):
        return self.stationsPath

    def get_customFilterPath(self):
        return self.customFilterPath

    def get_projectName(self):
        return self.projectName

    def get_projectFileName(self):
        return self.projectFileName

    def get_beamformResultsPath(self):
        return self.beamformingResutsPath

    def clear(self):
        """Reset every project attribute to None (does not touch the disk)."""
        self.basePath = None
        self.projectPath = None
        self.dataPath = None
        self.detectionsPath = None
        self.stationsPath = None
        self.customFilterPath = None
        self.homePath = None
        self.projectName = None
        self.projectFileName = None
        self.beamformingResutsPath = None
class IPNewProjectDialog(QDialog):
    """Modal dialog asking for a project name and a base directory."""

    basePath = None
    projectName = None

    def __init__(self, parent):
        super().__init__()
        # default project location: <home>/IPProjects
        homePath = Path.home()
        self.basePath = Path(homePath, 'IPProjects')
        self.buildUI()

    def buildUI(self):
        """Lay out the name/path fields, the directory preview and OK/Cancel."""
        self.setWindowTitle('Create a New Project')
        label_projectName = QLabel(self.tr('Project Name: '))
        self.lineEdit_projectName = QLineEdit()
        self.lineEdit_projectName.textChanged.connect(self.updateProjectPath)
        label_basePath = QLabel(self.tr('Base Directory: '))
        self.lineEdit_basePath = QLineEdit(str(self.basePath))
        self.lineEdit_basePath.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Preferred)
        self.lineEdit_basePath.setMinimumWidth(400)
        self.lineEdit_basePath.textChanged.connect(self.updateProjectPath)
        button_basePathEdit = QPushButton('Edit...')
        button_basePathEdit.clicked.connect(self.directoryDialog)
        # read-only preview of <base>/<name>, kept in sync by updateProjectPath
        self.label_projectDirectory = QLabel('Project Directory: ')
        self.label_projectDirectory_value = QLabel(str(self.basePath) + '/' + self.lineEdit_projectName.text())
        buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
        gridWidget = QWidget()
        gridlayout = QGridLayout()
        gridlayout.addWidget(label_projectName, 0, 0)
        gridlayout.addWidget(self.lineEdit_projectName, 0, 1)
        gridlayout.addWidget(label_basePath, 1, 0)
        gridlayout.addWidget(self.lineEdit_basePath, 1, 1)
        gridlayout.addWidget(button_basePathEdit, 1, 2)
        gridlayout.addWidget(self.label_projectDirectory, 2, 0)
        gridlayout.addWidget(self.label_projectDirectory_value, 2, 1)
        gridWidget.setLayout(gridlayout)
        mainLayout = QVBoxLayout()
        mainLayout.addWidget(gridWidget)
        mainLayout.addWidget(buttons)
        self.setLayout(mainLayout)

    def updateProjectPath(self):
        """Mirror the edit fields into basePath/projectName and the preview label."""
        # NOTE(review): this stores basePath as a str while __init__ and
        # directoryDialog store a Path — callers must cope with both.
        self.basePath = self.lineEdit_basePath.text()
        self.projectName = self.lineEdit_projectName.text()
        self.label_projectDirectory_value.setText(self.lineEdit_basePath.text() + '/' + self.lineEdit_projectName.text())

    def directoryDialog(self):
        """Let the user pick a base directory; update the field and attribute."""
        newBasePathName = QFileDialog.getExistingDirectory(
            self, "Choose a Directory", str(self.basePath), QFileDialog.ShowDirsOnly)
        if newBasePathName != '':
            # self.settings.setValue("last_projectbase_directory", newBasePathName)
            self.lineEdit_basePath.setText(newBasePathName)
            self.basePath = Path(newBasePathName)
def getBasePathAndProjectName(self):
return self.basePath, self.projectName | InfraView/widgets/IPProject.py | import sys
from pathlib import Path, PurePath
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import Qt, QObject, QSettings, QDir
from PyQt5.QtWidgets import (QApplication, QDialog, QGridLayout, QLabel, QLineEdit,
QPushButton, QFileDialog, QWidget, QGroupBox, QVBoxLayout,
QDialogButtonBox, QSizePolicy)
class IPProject:
    """Holds the directory layout and QSettings for one InfraView project."""

    basePath = None               # base path projects are saved in
    projectPath = None            # path of actual project
    dataPath = None               # where locally stored data (can) be saved
    # NOTE(review): attribute keeps the original 'Resuts' typo because
    # external code may read it directly.
    beamformingResutsPath = None  # path to csv files holding fstat, ba, and tracev data
    detectionsPath = None         # where picks will be saved
    customFilterPath = None       # where custom filters will be saved
    homePath = None               # user's home directory
    stationsPath = None           # where station xml files will be saved
    projectName = None
    projectFileName = None

    def __init__(self):
        # application-wide settings (remembers e.g. the last used directory)
        self.__globalSettings = QSettings('LANL', 'InfraView')

    def makeNewProject(self):
        """Ask the user for a name/location, create the directory tree and the
        .ipprj settings file.  Returns True on success, False if cancelled."""
        newDialog = IPNewProjectDialog(self)
        if not newDialog.exec_():
            return False
        self.basePath, self.projectName = newDialog.getBasePathAndProjectName()
        # pathlib's '/' operator instead of string concatenation
        self.projectPath = Path(self.basePath) / self.projectName
        self.dataPath = self.projectPath / 'data'
        self.detectionsPath = self.projectPath / 'detections'
        self.stationsPath = self.projectPath / 'stations'
        self.customFilterPath = self.projectPath / 'customFilters'
        self.beamformingResutsPath = self.projectPath / 'beamformingResults'
        # Create the project directories
        for directory in (self.projectPath, self.dataPath, self.detectionsPath,
                          self.stationsPath, self.customFilterPath,
                          self.beamformingResutsPath):
            directory.mkdir(parents=True, exist_ok=True)
        # Create a settings object/file for the new project and record the layout
        self.projectFileName = self.projectName + '.ipprj'
        self.projectSettings = QSettings(str(self.projectPath / self.projectFileName), QSettings.IniFormat)
        self.projectSettings.beginGroup('Main')
        self.projectSettings.setValue('projectName', str(self.projectName))
        self.projectSettings.endGroup()
        self.projectSettings.beginGroup('PathNames')
        self.projectSettings.setValue('basePathName', str(self.basePath))
        self.projectSettings.setValue('projectPathName', str(self.projectPath))
        self.projectSettings.setValue('dataPathName', str(self.dataPath))
        self.projectSettings.setValue('detectionsPathName', str(self.detectionsPath))
        self.projectSettings.setValue('stationsPathName', str(self.stationsPath))
        self.projectSettings.setValue('customFilterPathName', str(self.customFilterPath))
        self.projectSettings.setValue('beamformingResultsPath', str(self.beamformingResutsPath))
        self.projectSettings.endGroup()
        return True

    def loadProject(self):
        """Open a user-chosen .ipprj file and restore the project paths.
        Returns True if a file was chosen and loaded, else False."""
        mydirectory = self.__globalSettings.value('last_baseProject_directory', self.homePath)
        ipprjPathname, _ = QFileDialog.getOpenFileName(
            caption='Open InfraView Project', directory=mydirectory, filter='InfraView Project Files (*.ipprj)')
        if not ipprjPathname:
            return False
        self.projectSettings = QSettings(ipprjPathname, QSettings.IniFormat)
        self.projectSettings.beginGroup('Main')
        self.projectName = self.projectSettings.value('projectName')
        self.projectFileName = self.projectName + '.ipprj'
        self.projectSettings.endGroup()
        self.projectSettings.beginGroup('PathNames')
        self.basePath = Path(self.projectSettings.value('basePathName'))
        self.projectPath = Path(self.projectSettings.value('projectPathName'))
        self.dataPath = Path(self.projectSettings.value('dataPathName'))
        self.detectionsPath = Path(self.projectSettings.value('detectionsPathName'))
        self.stationsPath = Path(self.projectSettings.value('stationsPathName'))
        self.customFilterPath = Path(self.projectSettings.value('customFilterPathName'))
        # when opening old projects, newer settings might not be present
        beamPath = self.projectSettings.value('beamformingResultsPath')
        if beamPath is None:
            self.beamformingResutsPath = self.projectPath / 'beamformingResults'
        else:
            self.beamformingResutsPath = Path(beamPath)
        self.projectSettings.endGroup()
        return True

    # --- simple accessors, kept for existing callers -----------------------

    def get_basePath(self):
        return self.basePath

    def get_projectPath(self):
        return self.projectPath

    def get_dataPath(self):
        return self.dataPath

    def set_dataPath(self, path):
        self.dataPath = path

    def get_detectionsPath(self):
        return self.detectionsPath

    def get_stationsPath(self):
        return self.stationsPath

    def get_customFilterPath(self):
        return self.customFilterPath

    def get_projectName(self):
        return self.projectName

    def get_projectFileName(self):
        return self.projectFileName

    def get_beamformResultsPath(self):
        return self.beamformingResutsPath

    def clear(self):
        """Reset every project attribute to None (does not touch the disk)."""
        self.basePath = None
        self.projectPath = None
        self.dataPath = None
        self.detectionsPath = None
        self.stationsPath = None
        self.customFilterPath = None
        self.homePath = None
        self.projectName = None
        self.projectFileName = None
        self.beamformingResutsPath = None
class IPNewProjectDialog(QDialog):
    """Modal dialog asking for a project name and a base directory."""

    basePath = None
    projectName = None

    def __init__(self, parent):
        super().__init__()
        # default project location: <home>/IPProjects
        homePath = Path.home()
        self.basePath = Path(homePath, 'IPProjects')
        self.buildUI()

    def buildUI(self):
        """Lay out the name/path fields, the directory preview and OK/Cancel."""
        self.setWindowTitle('Create a New Project')
        label_projectName = QLabel(self.tr('Project Name: '))
        self.lineEdit_projectName = QLineEdit()
        self.lineEdit_projectName.textChanged.connect(self.updateProjectPath)
        label_basePath = QLabel(self.tr('Base Directory: '))
        self.lineEdit_basePath = QLineEdit(str(self.basePath))
        self.lineEdit_basePath.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Preferred)
        self.lineEdit_basePath.setMinimumWidth(400)
        self.lineEdit_basePath.textChanged.connect(self.updateProjectPath)
        button_basePathEdit = QPushButton('Edit...')
        button_basePathEdit.clicked.connect(self.directoryDialog)
        # read-only preview of <base>/<name>, kept in sync by updateProjectPath
        self.label_projectDirectory = QLabel('Project Directory: ')
        self.label_projectDirectory_value = QLabel(str(self.basePath) + '/' + self.lineEdit_projectName.text())
        buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel, Qt.Horizontal, self)
        buttons.accepted.connect(self.accept)
        buttons.rejected.connect(self.reject)
        gridWidget = QWidget()
        gridlayout = QGridLayout()
        gridlayout.addWidget(label_projectName, 0, 0)
        gridlayout.addWidget(self.lineEdit_projectName, 0, 1)
        gridlayout.addWidget(label_basePath, 1, 0)
        gridlayout.addWidget(self.lineEdit_basePath, 1, 1)
        gridlayout.addWidget(button_basePathEdit, 1, 2)
        gridlayout.addWidget(self.label_projectDirectory, 2, 0)
        gridlayout.addWidget(self.label_projectDirectory_value, 2, 1)
        gridWidget.setLayout(gridlayout)
        mainLayout = QVBoxLayout()
        mainLayout.addWidget(gridWidget)
        mainLayout.addWidget(buttons)
        self.setLayout(mainLayout)

    def updateProjectPath(self):
        """Mirror the edit fields into basePath/projectName and the preview label."""
        # NOTE(review): this stores basePath as a str while __init__ and
        # directoryDialog store a Path — callers must cope with both.
        self.basePath = self.lineEdit_basePath.text()
        self.projectName = self.lineEdit_projectName.text()
        self.label_projectDirectory_value.setText(self.lineEdit_basePath.text() + '/' + self.lineEdit_projectName.text())

    def directoryDialog(self):
        """Let the user pick a base directory; update the field and attribute."""
        newBasePathName = QFileDialog.getExistingDirectory(
            self, "Choose a Directory", str(self.basePath), QFileDialog.ShowDirsOnly)
        if newBasePathName != '':
            # self.settings.setValue("last_projectbase_directory", newBasePathName)
            self.lineEdit_basePath.setText(newBasePathName)
            self.basePath = Path(newBasePathName)
def getBasePathAndProjectName(self):
return self.basePath, self.projectName | 0.354098 | 0.073264 |
from db.function.ExistProfil import ExistProfil
from db.function.Vehicule import Vehicule, get_all_vehicule
from db.function.Querry import Querry
from Game.image.create import SynoImages
from db.files.vhl import required, calculation
class Syno():
    def __init__(self):
        # Per-slot unit counters filled by update_syno; last slot counts CTA.
        self.syno = [0, 0, 0, 0, 0, 0] # [HDR, SOFF, OFF, MED, INF, CTA]
    def update_syno(self):
        """Count players currently in service into the self.syno buckets.

        Non-CTA players are bucketed by (hierarchie - 1); CTA players go
        into the last slot (index 5).  Returns self for chaining.
        """
        service = Querry("SELECT * FROM service")
        for data in service:
            i, uid, name, starttime, cta = data
            player = ExistProfil(uid)
            if not cta:self.syno[player.hierarchie-1] += 1
            if cta: self.syno[5] += 1
        return self
    def updatevhl(self, vhl):
        """Recompute one vehicle's statut (1 = crewable, 0 = not) from the
        current unit counts in self.syno and the vehicle's requirement tables,
        then persist it via vhl.save()."""
        vhl = Vehicule(vhl)
        # Only vehicles in statut 0/1 are managed; anything else is left alone.
        if vhl.statut != 0 and vhl.statut != 1: return 'stop'
        vhl_required = required[vhl.vehicule]
        vhl_calcualtion = calculation[vhl.vehicule]
        # NOTE(review): "inf" maps to syno[3] and "med" to syno[4], which is the
        # opposite of the [HDR, SOFF, OFF, MED, INF, CTA] comment in __init__ —
        # confirm which ordering is correct.
        syno_dict = {"hdr":self.syno[0], "soff":self.syno[1], "off":self.syno[2], "inf":self.syno[3], "med":self.syno[4], "":0}
        syno_vhl = {"hdr":0, "soff":0, "off":0, "inf":0, "med":0}
        # Pool the available units per slot according to the calculation table.
        for key in vhl_calcualtion.keys():
            for i in range(len(vhl_calcualtion[key])):
                syno_vhl[key] += syno_dict[vhl_calcualtion[key][i]]
        number_required, number_unit = 0, 0
        for i in vhl_required.values(): number_required += i
        for i in syno_vhl.values(): number_unit += i
        if number_required <= number_unit:
            # Walk the slots in reversed insertion order, letting surplus from
            # one slot carry over into the next requirement check.
            syno_vhl_inverted = []
            for a in syno_vhl.items():
                syno_vhl_inverted.insert(0, (a[0], a[1]))
            syno_vhl_inverted = dict(syno_vhl_inverted)
            result = 0
            for i in syno_vhl_inverted.keys():
                result = syno_vhl[i] - (vhl_required[i] - result)
                if result < 0:
                    break
            if result >= 0 : vhl.statut = 1
            else:
                vhl.statut = 0
        else:
            # not enough units overall
            vhl.statut = 0
        vhl.save()
def updatevhls(self):
for vhl in get_all_vehicule(): self.updatevhl(vhl[0])
    def createsyno(self):
        # Render the counters to an image; 'uptade' is SynoImages' own (typo'd) API.
        SynoImages(self.syno).uptade()
    def run(self):
        """Full refresh: recount units, update every vehicle, redraw the image."""
        self.update_syno()
        self.updatevhls()
self.createsyno() | src/Game/Syno/Syno.py | from db.function.ExistProfil import ExistProfil
from db.function.Vehicule import Vehicule, get_all_vehicule
from db.function.Querry import Querry
from Game.image.create import SynoImages
from db.files.vhl import required, calculation
class Syno():
    def __init__(self):
        # Per-slot unit counters filled by update_syno; last slot counts CTA.
        self.syno = [0, 0, 0, 0, 0, 0] # [HDR, SOFF, OFF, MED, INF, CTA]
    def update_syno(self):
        """Count players currently in service into the self.syno buckets.

        Non-CTA players are bucketed by (hierarchie - 1); CTA players go
        into the last slot (index 5).  Returns self for chaining.
        """
        service = Querry("SELECT * FROM service")
        for data in service:
            i, uid, name, starttime, cta = data
            player = ExistProfil(uid)
            if not cta:self.syno[player.hierarchie-1] += 1
            if cta: self.syno[5] += 1
        return self
    def updatevhl(self, vhl):
        """Recompute one vehicle's statut (1 = crewable, 0 = not) from the
        current unit counts in self.syno and the vehicle's requirement tables,
        then persist it via vhl.save()."""
        vhl = Vehicule(vhl)
        # Only vehicles in statut 0/1 are managed; anything else is left alone.
        if vhl.statut != 0 and vhl.statut != 1: return 'stop'
        vhl_required = required[vhl.vehicule]
        vhl_calcualtion = calculation[vhl.vehicule]
        # NOTE(review): "inf" maps to syno[3] and "med" to syno[4], which is the
        # opposite of the [HDR, SOFF, OFF, MED, INF, CTA] comment in __init__ —
        # confirm which ordering is correct.
        syno_dict = {"hdr":self.syno[0], "soff":self.syno[1], "off":self.syno[2], "inf":self.syno[3], "med":self.syno[4], "":0}
        syno_vhl = {"hdr":0, "soff":0, "off":0, "inf":0, "med":0}
        # Pool the available units per slot according to the calculation table.
        for key in vhl_calcualtion.keys():
            for i in range(len(vhl_calcualtion[key])):
                syno_vhl[key] += syno_dict[vhl_calcualtion[key][i]]
        number_required, number_unit = 0, 0
        for i in vhl_required.values(): number_required += i
        for i in syno_vhl.values(): number_unit += i
        if number_required <= number_unit:
            # Walk the slots in reversed insertion order, letting surplus from
            # one slot carry over into the next requirement check.
            syno_vhl_inverted = []
            for a in syno_vhl.items():
                syno_vhl_inverted.insert(0, (a[0], a[1]))
            syno_vhl_inverted = dict(syno_vhl_inverted)
            result = 0
            for i in syno_vhl_inverted.keys():
                result = syno_vhl[i] - (vhl_required[i] - result)
                if result < 0:
                    break
            if result >= 0 : vhl.statut = 1
            else:
                vhl.statut = 0
        else:
            # not enough units overall
            vhl.statut = 0
        vhl.save()
def updatevhls(self):
for vhl in get_all_vehicule(): self.updatevhl(vhl[0])
    def createsyno(self):
        # Render the counters to an image; 'uptade' is SynoImages' own (typo'd) API.
        SynoImages(self.syno).uptade()
    def run(self):
        """Full refresh: recount units, update every vehicle, redraw the image."""
        self.update_syno()
        self.updatevhls()
self.createsyno() | 0.279828 | 0.216167 |
from algorithms.base_algorithm import Algorithm, AlgorithmException
class BaseAlgorithmMock(Algorithm):
    """Algorithm test double that records which lifecycle methods were called."""

    def __init__(self, path=None, parameters=None):
        # flags inspected by the test-suite
        self.called_save = False
        self.called_train = False
        self.called_load = bool(path)
        self.save_path = None
        if parameters:
            self.parameters = parameters

    def save(self, path):
        self.called_save = True
        self.save_path = path

    def train(self, samples, labels):
        self.called_train = True
class AlgorithmMock1(BaseAlgorithmMock):
    """A docstring."""

    @classmethod
    def get_parameters(cls):
        # a single int parameter with three allowed values
        parameters = {
            'some_name': {
                'description': "Something.",
                'type': int,
                'values': [1, 2, 3]
            }
        }
        return parameters

    def predict(self, data):
        prediction = False
        details = {"something": "Somethong"}
        return prediction, details
class AlgorithmMock2(BaseAlgorithmMock):
    """Docstring 2."""
    multilabel = True

    @classmethod
    def get_parameters(cls):
        # Two parameters: an enumerated int and an enumerated string.
        parameters = {}
        parameters['param1'] = {
            'description': "Param 1.",
            'type': int,
            'values': [1, 2, 3],
        }
        parameters['param2'] = {
            'description': "Param 2.",
            'type': str,
            'values': ['a', 'b', 'c'],
        }
        return parameters

    def train(self, samples, labels):
        # Record the call via the base class, then derive the class count
        # from the highest label seen.
        super().train(samples, labels)
        self.num_classes = max(labels)

    def predict(self, data):
        return 0, {"something": 0}
class RaisingAlgorithmExceptionMock(BaseAlgorithmMock):
    """Test double whose every operation raises :class:`AlgorithmException`.

    Lets callers verify that load/train/predict/save failures are surfaced
    properly.  The call-tracking flags of :class:`BaseAlgorithmMock` are
    still recorded before each exception is raised.
    """
    multilabel = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Fix: the original only checked ``'path' in kwargs``, so a path
        # passed positionally (the base signature is ``(path=None, ...)``)
        # never triggered the load exception.
        if 'path' in kwargs or (args and args[0] is not None):
            raise AlgorithmException('load exception')

    @classmethod
    def get_parameters(cls):
        return {}

    def train(self, samples, labels):
        super().train(samples, labels)
        raise AlgorithmException("train exception")

    def predict(self, sample):
        # NOTE(review): BaseAlgorithmMock defines no predict(); this assumes
        # the Algorithm base class does — confirm, else this would raise
        # AttributeError instead of the intended AlgorithmException.
        super().predict(sample)
        raise AlgorithmException('predict exception')

    def save(self, path):
        super().save(path)
        raise AlgorithmException('path exception')
# Registry mapping algorithm names to their mock classes, as consumed by the
# test suite when wiring algorithms into the application under test.
TEST_ALG_DICT = {
    'first_mock': AlgorithmMock1,
    'second_mock': AlgorithmMock2,
    'raise_mock': RaisingAlgorithmExceptionMock
} | AlgorithmAnalyzer/Backend/algorithms/tests/mocks.py | from algorithms.base_algorithm import Algorithm, AlgorithmException
class BaseAlgorithmMock(Algorithm):
def __init__(self, path=None, parameters=None):
self.called_save = False
self.called_train = False
self.called_load = False
self.save_path = None
if parameters:
self.parameters = parameters
if path:
self.called_load = True
def save(self, path):
self.called_save = True
self.save_path = path
def train(self, samples, labels):
self.called_train = True
class AlgorithmMock1(BaseAlgorithmMock):
"""A docstring."""
@classmethod
def get_parameters(cls):
return {
'some_name': {
'description': "Something.",
'type': int,
'values': [1, 2, 3]
}
}
def predict(self, data):
return False, {"something": "Somethong"}
class AlgorithmMock2(BaseAlgorithmMock):
"""Docstring 2."""
multilabel = True
@classmethod
def get_parameters(cls):
return {
'param1': {
'description': "Param 1.",
'type': int,
'values': [1, 2, 3]
},
'param2': {
'description': "Param 2.",
'type': str,
'values': ['a', 'b', 'c']
}
}
def train(self, samples, labels):
super().train(samples, labels)
self.num_classes = max(labels)
def predict(self, data):
return 0, {"something": 0}
class RaisingAlgorithmExceptionMock(BaseAlgorithmMock):
multilabel = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if 'path' in kwargs:
raise AlgorithmException('load exception')
@classmethod
def get_parameters(cls):
return {}
def train(self, samples, labels):
super().train(samples, labels)
raise AlgorithmException("train exception")
def predict(self, sample):
super().predict(sample)
raise AlgorithmException('predict exception')
def save(self, path):
super().save(path)
raise AlgorithmException('path exception')
TEST_ALG_DICT = {
'first_mock': AlgorithmMock1,
'second_mock': AlgorithmMock2,
'raise_mock': RaisingAlgorithmExceptionMock
} | 0.799794 | 0.259462 |
import transaction
import logging
from sqlalchemy.orm import aliased
from ..models.model import SysOrg, SysUser, SysUserOrg, HasPad, SysUserRole
from ..common.dateutils import date_now
from ..common.paginator import Paginator
logger = logging.getLogger(__name__)
def find_branch(dbs, user_org_id=None, org_type=None):
    """Return the org subtree rooted at *user_org_id* as a flat list.

    Recursively walks ``brms.sys_org`` starting at *user_org_id* (or the
    top org, id 1, when not given).

    :param dbs: SQLAlchemy session
    :param user_org_id: root org id; defaults to org 1
    :param org_type: '0' 公司 / '1' 部门 — restricts which children are followed
    :return: list of ``{'org_id', 'org_name'}`` dicts ordered by id
    :raises ValueError: when *user_org_id* is not an integer or *org_type*
        is not one of the documented codes
    """
    branches = []
    sql = 'WITH RECURSIVE r AS ( SELECT * FROM brms.sys_org '
    if user_org_id:
        # int() blocks SQL injection through a string-typed id.
        sql += ' WHERE id = %d' % int(user_org_id)
    else:
        sql += ' WHERE id = 1'
    sql += ' union ALL SELECT sys_org.* FROM brms.sys_org, r WHERE sys_org.parent_id = r.id '
    if org_type:
        # Whitelist the documented codes instead of concatenating raw input
        # into the SQL string (injection hardening).
        if org_type not in ('0', '1'):
            raise ValueError('invalid org_type: %r' % org_type)
        sql += " and sys_org.org_type = '%s'" % org_type
    sql += ') SELECT id,org_name,parent_id FROM r ORDER BY id'
    for rec in dbs.execute(sql):
        branches.append({'org_id': rec[0], 'org_name': rec[1]})
    return branches
def find_branch_json(dbs, user_org_id=None, org_type=None):
    """Return the org subtree rooted at *user_org_id* as zTree-style nodes.

    :param dbs: SQLAlchemy session
    :param user_org_id: root org id; defaults to the top org (id 1)
    :param org_type: '0' 公司 / '1' 部门 — restricts which children are followed
    :return: list of ``{'id', 'name', 'pId'[, 'open']}`` dicts ordered by id
    :raises ValueError: when *user_org_id* is not an integer or *org_type*
        is not one of the documented codes
    """
    branches = []
    sql = 'WITH RECURSIVE r AS ( SELECT * FROM brms.sys_org '
    if user_org_id:
        # int() blocks SQL injection through a string-typed id.
        sql += ' WHERE id = %d' % int(user_org_id)
    else:
        sql += ' WHERE id = 1'
    sql += ' union ALL SELECT sys_org.* FROM brms.sys_org, r WHERE sys_org.parent_id = r.id '
    if org_type:
        # Whitelist the documented codes instead of concatenating raw input
        # into the SQL string (injection hardening).
        if org_type not in ('0', '1'):
            raise ValueError('invalid org_type: %r' % org_type)
        sql += " and sys_org.org_type = '%s'" % org_type
    sql += ') SELECT id,org_name,parent_id FROM r ORDER BY id'
    for rec in dbs.execute(sql):
        branch = {'id': rec[0], 'name': rec[1], 'pId': rec[2]}
        if rec[2] == 0:
            # Root nodes start expanded in the tree widget.
            branch['open'] = True
        branches.append(branch)
    return branches
def find_branch_json_check(dbs, user_id, user_now=None):
    """Build the company-org tree with per-node check/assignability state.

    :param dbs: SQLAlchemy session
    :param user_id: the user being edited — their current grants show as checked
    :param user_now: the logged-in user — only their own granted orgs are selectable
    :return: list of zTree node dicts (``id``/``name``/``pId`` plus flags)
    """
    branches = []
    orgs = dbs.query(SysOrg.id, SysOrg.org_name, SysOrg.parent_id).filter(SysOrg.org_type == '0').all()
    # Orgs the logged-in user may assign (set for O(1) membership instead of
    # the original tuple scan).
    assignable = {rec[0] for rec in
                  dbs.query(SysUserOrg.org_id).filter(SysUserOrg.user_id == user_now).all()}
    # Orgs already granted to the user being edited (was an O(n*m) nested
    # loop over the result rows; a set lookup is equivalent and linear).
    checked = {rec[0] for rec in
               dbs.query(SysUserOrg.org_id).filter(SysUserOrg.user_id == user_id).all()}
    for rec in orgs:
        branch = {'id': rec[0], 'name': rec[1], 'pId': rec[2]}
        if rec[2] == 0:
            branch['open'] = True
        if rec[0] in assignable:
            branch['doCheck'] = True
        else:
            branch['doCheck'] = False
            branch['name'] += '(不可选)'
        if rec[0] in checked:
            branch['checked'] = True
        branches.append(branch)
    return branches
def find_branch_json_4booking(dbs, user_id, user_org_id, tree=True):
    """Build the selectable org tree for booking.

    Only company-type ('0') orgs granted to *user_id* are selectable; the
    user's own top-level company org is pre-checked.  When *tree* is true
    the disabled ancestor chain is added so the nodes form a proper tree.
    """
    top_org_id = find_parent_org(dbs, user_org_id)
    if user_org_id != top_org_id:
        user_org_id = top_org_id
    granted = dbs.query(SysUserOrg.org_id)\
        .outerjoin(SysOrg, SysOrg.id == SysUserOrg.org_id)\
        .filter(SysUserOrg.user_id == user_id, SysOrg.org_type == '0').all()
    granted_ids = [row.org_id for row in granted]
    org_rows = dbs.query(SysOrg.id, SysOrg.org_name, SysOrg.parent_id).filter(SysOrg.id.in_(granted_ids)).all()
    nodes = {}
    for org_id, org_name, parent_id in org_rows:
        node = {'id': org_id, 'name': org_name, 'pId': parent_id, 'doCheck': True}
        if parent_id == 0:
            node['open'] = True
        if org_id == user_org_id:
            node['checked'] = True
        nodes[org_id] = node
    if tree:
        # Pull in the (disabled) ancestors of every granted org.
        for org_id in granted_ids:
            find_parents(dbs, nodes[org_id]['pId'], nodes, is_open=(org_id == user_org_id))
    return list(nodes.values())
def find_parents(dbs, parent_id, org_dict, is_open=False):
    """Recursively add the disabled ancestor chain of *parent_id* to *org_dict*.

    Stops at the tree root (parent id 0) or at an org already present in
    the dict; ancestor nodes are marked ``chkDisabled`` and tagged as
    non-selectable in their display name.
    """
    if parent_id == 0 or parent_id in org_dict.keys():
        return
    row = dbs.query(SysOrg.id, SysOrg.org_name, SysOrg.parent_id).filter(SysOrg.id == parent_id).first()
    org_dict[parent_id] = {
        'id': row[0],
        'name': row[1] + '(不可选)',
        'pId': row[2],
        'chkDisabled': True,
        'open': is_open,
    }
    if row[2] != 0:
        find_parents(dbs, row[2], org_dict, is_open)
def find_orgs(dbs, org_name=None, parent_id=None, address=None, org_id=None, page_no=1, show_child=True):
    """Query a paginated org listing with optional filters.

    :param dbs: SQLAlchemy session
    :param org_name: fuzzy filter on the org name
    :param parent_id: exact filter on the parent org
    :param address: fuzzy filter on the address
    :param org_id: restrict to this org (and, when *show_child*, its subtree)
    :param page_no: page number for the paginator
    :param show_child: include the whole subtree of *org_id*
    :return: ``(rows, paginator)`` where rows are plain dicts
    """
    parent_org = aliased(SysOrg)
    query = dbs.query(SysOrg.id,
                      SysOrg.org_name,
                      SysOrg.org_type,
                      parent_org.org_name,
                      SysOrg.org_manager,
                      SysOrg.phone,
                      SysOrg.address,
                      SysOrg.state,
                      SysUser.user_name,
                      SysOrg.create_time) \
        .outerjoin(SysUser, SysUser.id == SysOrg.create_user) \
        .outerjoin(parent_org, SysOrg.parent_id == parent_org.id)
    if org_id:
        if show_child:
            subtree = find_branch_json(dbs, org_id)
            query = query.filter(SysOrg.id.in_([node['id'] for node in subtree]))
        else:
            query = query.filter(SysOrg.id == org_id)
    if org_name:
        query = query.filter(SysOrg.org_name.like('%' + org_name + '%'))
    if parent_id:
        query = query.filter(SysOrg.parent_id == parent_id)
    if address:
        query = query.filter(SysOrg.address.like('%' + address + '%'))
    query = query.order_by(SysOrg.create_time.desc())
    results, paginator = Paginator(query, page_no).to_dict()
    fields = ('id', 'org_name', 'org_type', 'parent_name', 'org_manager',
              'phone', 'address', 'state', 'user_name', 'create_time')
    # Falsy column values (NULLs) are normalised to '' exactly as before.
    lists = [{name: (value if value else '') for name, value in zip(fields, row)}
             for row in results]
    return lists, paginator
def find_org(dbs, org_id):
    """Return the listing dict for a single org, or ``None`` when absent."""
    orgs, _paginator = find_orgs(dbs, org_id=org_id)
    return orgs[0] if orgs else None
def find_org_by_id(dbs, org_id):
    """Return the :class:`SysOrg` row with the given id, or ``None``."""
    # .first() already yields None when no row matches, so no extra branch
    # is needed.
    return dbs.query(SysOrg).filter(SysOrg.id == org_id).first()
def check_org_name(dbs, org_name, parent_id):
    """Validate that *org_name* is non-empty and unused under *parent_id*.

    :return: '' when the name is acceptable, otherwise a user-facing
        error message
    """
    if not org_name:
        return "机构名称不能为空"
    duplicate = dbs.query(SysOrg).filter(SysOrg.org_name == org_name,
                                         SysOrg.parent_id == parent_id).first()
    if duplicate:
        return "机构名称重复"
    return ""
def add(dbs, org):
    """Add an org (添加机构) and grant it to its creator and the admin user.

    After inserting the org, two SysUserOrg grants are merged: one for the
    creating user and one for user id 1, so both can see the new org.

    :param dbs: SQLAlchemy session
    :param org: populated SysOrg instance to insert
    :return: '' on success, otherwise a user-facing error message
    """
    try:
        dbs.add(org)
        # Flush so the database assigns org.id before the grants below use it.
        dbs.flush()
        sys_user_org = SysUserOrg(user_id=org.create_user, org_id=org.id, create_user=org.create_user,
                                  create_time=date_now())
        dbs.merge(sys_user_org)
        # Grant the built-in administrator (user id 1) as well.
        # NOTE(review): unlike update()/delete(), no transaction.manager is
        # used here — presumably the caller owns the transaction; confirm.
        sys_user_org = SysUserOrg(user_id=1, org_id=org.id, create_user=org.create_user,
                                  create_time=date_now())
        dbs.merge(sys_user_org)
        return ''
    except Exception as e:
        logger.error(e)
        return '添加机构失败,请重试!'
def update(dbs, org):
    """Persist changes to an existing org.

    :param dbs: SQLAlchemy session
    :param org: SysOrg instance carrying the new field values
    :return: '' on success, otherwise a user-facing error message
    """
    try:
        # The transaction manager commits on normal exit of the block.
        with transaction.manager:
            dbs.merge(org)
            dbs.flush()
        return ''
    except Exception as exc:
        logger.error(exc)
        return '更新机构信息失败,请重试!'
def delete(dbs, org_id):
    """Delete an org and everything attached to it (删除机构).

    Cascade, inside one transaction: the org's pads, all grants on the org,
    every user belonging to the org (plus that user's own org grants and
    role grants), and finally the org row itself.  Refuses to delete an
    org that still has child orgs.

    :param dbs: SQLAlchemy session
    :param org_id: id of the org to remove
    :return: '' on success, otherwise a user-facing error message
    """
    try:
        with transaction.manager as tm:
            # Guard: children must be removed first; abort keeps the DB clean.
            children = dbs.query(SysOrg).filter(SysOrg.parent_id == org_id).all()
            if children:
                tm.abort()
                return '请先删除此机构的子机构!'
            dbs.query(HasPad).filter(HasPad.org_id == org_id).delete()
            dbs.query(SysUserOrg).filter(SysUserOrg.org_id == org_id).delete()
            users = dbs.query(SysUser).filter(SysUser.org_id == org_id).all()
            if users:
                for user in users:
                    # Drop each member's grants before the user row itself.
                    dbs.query(SysUserOrg).filter(SysUserOrg.user_id == user.id).delete()
                    dbs.query(SysUserRole).filter(SysUserRole.user_id == user.id).delete()
                    dbs.delete(user)
            dbs.query(SysOrg).filter(SysOrg.id == org_id).delete()
            dbs.flush()
        return ''
    except Exception as e:
        logger.error(e)
        return '删除机构失败,请重试!'
def find_org_ids(dbs, user_org_id):
    """Return the ids of *user_org_id* and every descendant org.

    :param dbs: SQLAlchemy session
    :param user_org_id: root org id; defaults to the top org (id 1) when falsy
    :return: list of org ids ordered by id
    :raises ValueError: when *user_org_id* is not an integer
    """
    sql = 'WITH RECURSIVE r AS ( SELECT * FROM brms.sys_org '
    if user_org_id:
        # int() blocks SQL injection through a string-typed id (the original
        # interpolated the raw value into the statement).
        sql += ' WHERE id = %d' % int(user_org_id)
    else:
        sql += ' WHERE id = 1'
    sql += ' union ALL SELECT sys_org.* FROM brms.sys_org, r WHERE sys_org.parent_id = r.id ) ' \
           'SELECT id,org_name,parent_id FROM r ORDER BY id'
    return [rec[0] for rec in dbs.execute(sql)]
def find_org_by_user(dbs, user_id):
    """Return the ids of all orgs granted to *user_id*."""
    rows = dbs.query(SysUserOrg.org_id).filter(SysUserOrg.user_id == user_id).all()
    return [row[0] for row in rows]
def find_parent_org(dbs, org_id):
    """Walk up from *org_id* to the nearest company-type ('0') ancestor.

    :param dbs: SQLAlchemy session
    :param org_id: starting org id
    :return: id of the first org (possibly *org_id* itself) whose
        ``org_type`` is '0'; falls back to the last reachable id when the
        parent chain is broken or cyclic

    Implemented iteratively: the recursive original raised AttributeError
    on a dangling parent reference (``org`` is None) and could recurse
    without bound on a cyclic parent chain.
    """
    seen = set()
    current_id = org_id
    while current_id not in seen:
        seen.add(current_id)
        org = dbs.query(SysOrg).filter(SysOrg.id == current_id).first()
        if org is None:
            # Dangling parent reference: return the last valid id instead
            # of crashing with AttributeError.
            return current_id
        if org.org_type == '0':
            return current_id
        current_id = org.parent_id
    # Cycle in the parent chain — stop rather than loop forever.
    return current_id
import logging
from sqlalchemy.orm import aliased
from ..models.model import SysOrg, SysUser, SysUserOrg, HasPad, SysUserRole
from ..common.dateutils import date_now
from ..common.paginator import Paginator
logger = logging.getLogger(__name__)
def find_branch(dbs, user_org_id=None, org_type=None):
"""
获取机构列表
:param dbs:
:param user_org_id:
:param org_type:0公司,1部门
:return:
"""
branches = []
sql = 'WITH RECURSIVE r AS ( SELECT * FROM brms.sys_org '
if user_org_id:
sql += ' WHERE id = %s' % user_org_id
else:
sql += ' WHERE id = 1'
sql += ' union ALL SELECT sys_org.* FROM brms.sys_org, r WHERE sys_org.parent_id = r.id '
if org_type:
sql += ' and sys_org.org_type = \'' + org_type + '\''
sql += ') SELECT id,org_name,parent_id FROM r ORDER BY id'
curs = dbs.execute(sql)
for rec in curs:
branch = {}
branch['org_id'] = rec[0]
branch['org_name'] = rec[1]
branches.append(branch)
return branches
def find_branch_json(dbs, user_org_id=None, org_type=None):
"""
获取未分配的机构树
:param dbs:
:param user_org_id:
:param org_type:0公司,1部门
:return:
"""
branches = []
sql = 'WITH RECURSIVE r AS ( SELECT * FROM brms.sys_org '
if user_org_id:
sql += ' WHERE id = %s' % user_org_id
else:
sql += ' WHERE id = 1'
sql += ' union ALL SELECT sys_org.* FROM brms.sys_org, r WHERE sys_org.parent_id = r.id '
if org_type:
sql += ' and sys_org.org_type = \'' + org_type + '\''
sql += ') SELECT id,org_name,parent_id FROM r ORDER BY id'
curs = dbs.execute(sql)
for rec in curs:
branch = {}
branch['id'] = rec[0]
branch['name'] = rec[1]
branch['pId'] = rec[2]
if rec[2] == 0:
branch['open'] = True
branches.append(branch)
return branches
def find_branch_json_check(dbs, user_id, user_now=None):
"""
获取机构树
:param dbs:
:param user_id:
:param user_now:
:return:
"""
branches = []
orgs = dbs.query(SysOrg.id, SysOrg.org_name, SysOrg.parent_id).filter(SysOrg.org_type == '0').all()
# 当前的登录用户可分配的机构
user_orgs = dbs.query(SysUserOrg.org_id).filter(SysUserOrg.user_id == user_now).all()
user_org_list = []
for rec in user_orgs:
user_org_list.append(rec[0])
user_tuple = tuple(user_org_list)
# 当前勾选的用户已分配的机构
curs = dbs.query(SysUserOrg.org_id).filter(SysUserOrg.user_id == user_id).all()
for rec in orgs:
branch = {}
branch['id'] = rec[0]
branch['name'] = rec[1]
branch['pId'] = rec[2]
if rec[2] == 0:
branch['open'] = True
if rec[0] in user_tuple:
branch['doCheck'] = True
else:
branch['doCheck'] = False
branch['name'] += '(不可选)'
for org in curs:
if rec[0] == org[0]:
branch['checked'] = True
branches.append(branch)
return branches
def find_branch_json_4booking(dbs, user_id, user_org_id, tree=True):
"""
获取机构树
:param dbs:
:param user_id:
:param user_org_id:
:param tree:
:return:
"""
user_parent_org_id = find_parent_org(dbs, user_org_id)
if user_org_id != user_parent_org_id:
user_org_id = user_parent_org_id
user_orgs = dbs.query(SysUserOrg.org_id)\
.outerjoin(SysOrg, SysOrg.id == SysUserOrg.org_id)\
.filter(SysUserOrg.user_id == user_id, SysOrg.org_type == '0').all()
orgs_ids = [i.org_id for i in user_orgs]
user_orgs = dbs.query(SysOrg.id, SysOrg.org_name, SysOrg.parent_id).filter(SysOrg.id.in_(orgs_ids)).all()
org_dict = {}
for org in user_orgs:
branch = dict()
branch['id'] = org[0]
branch['name'] = org[1]
branch['pId'] = org[2]
branch['doCheck'] = True
if org[2] == 0:
branch['open'] = True
if org[0] == user_org_id:
branch['checked'] = True
org_dict[org[0]] = branch
if tree:
for org_id in orgs_ids:
find_parents(dbs, org_dict[org_id]['pId'], org_dict, is_open=(org_id == user_org_id))
return [v for k, v in org_dict.items()]
def find_parents(dbs, parent_id, org_dict, is_open=False):
"""
查找父机构并加入到字典中
:param dbs:
:param parent_id:
:param org_dict:
:param is_open:
:return:
"""
if parent_id == 0 or parent_id in org_dict.keys():
return
org = dbs.query(SysOrg.id, SysOrg.org_name, SysOrg.parent_id).filter(SysOrg.id == parent_id).first()
branch = dict()
branch['id'] = org[0]
branch['name'] = org[1] + '(不可选)'
branch['pId'] = org[2]
branch['chkDisabled'] = True
branch['open'] = is_open
org_dict[parent_id] = branch
if org[2] == 0:
return
find_parents(dbs, org[2], org_dict, is_open)
return
def find_orgs(dbs, org_name=None, parent_id=None, address=None, org_id=None, page_no=1, show_child=True):
"""
查询org列表
:param dbs:
:param org_name:
:param parent_id:
:param address:
:param org_id:
:param page_no:
:param show_child:
:return:
"""
sysorg1 = aliased(SysOrg)
orgs = dbs.query(SysOrg.id,
SysOrg.org_name,
SysOrg.org_type,
sysorg1.org_name,
SysOrg.org_manager,
SysOrg.phone,
SysOrg.address,
SysOrg.state,
SysUser.user_name,
SysOrg.create_time) \
.outerjoin(SysUser, SysUser.id == SysOrg.create_user) \
.outerjoin(sysorg1, SysOrg.parent_id == sysorg1.id)
if org_id:
if show_child:
tmp = find_branch_json(dbs, org_id)
child_org = list(map((lambda x: x['id']), tmp))
orgs = orgs.filter(SysOrg.id.in_(child_org))
else:
orgs = orgs.filter(SysOrg.id == org_id)
if org_name:
orgs = orgs.filter(SysOrg.org_name.like('%' + org_name + '%'))
if parent_id:
orgs = orgs.filter(SysOrg.parent_id == parent_id)
if address:
orgs = orgs.filter(SysOrg.address.like('%' + address + '%'))
orgs = orgs.order_by(SysOrg.create_time.desc())
results, paginator = Paginator(orgs, page_no).to_dict()
lists = []
for obj in results:
obj_id = obj[0] if obj[0] else ''
org_name = obj[1] if obj[1] else ''
org_type = obj[2] if obj[2] else ''
parent_name = obj[3] if obj[3] else ''
org_manager = obj[4] if obj[4] else ''
phone = obj[5] if obj[5] else ''
address = obj[6] if obj[6] else ''
state = obj[7] if obj[7] else ''
user_name = obj[8] if obj[8] else ''
create_time = obj[9] if obj[9] else ''
temp_dict = {
'id': obj_id,
'org_name': org_name,
'org_type': org_type,
'parent_name': parent_name,
'org_manager': org_manager,
'phone': phone,
'address': address,
'state': state,
'user_name': user_name,
'create_time': create_time
}
lists.append(temp_dict)
return lists, paginator
def find_org(dbs, org_id):
"""
:param dbs:
:param org_id:
:return:
"""
(orgs, paginator) = find_orgs(dbs, org_id=org_id)
if len(orgs) >= 1:
return orgs[0]
return None
def find_org_by_id(dbs, org_id):
"""
:param dbs:
:param org_id:
:return:
"""
org = dbs.query(SysOrg).filter(SysOrg.id == org_id).first()
if org:
return org
else:
return None
def check_org_name(dbs, org_name, parent_id):
"""
判断机构名称是否已被占用
:param dbs:
:param org_name:
:param parent_id
:return:
"""
if not org_name:
return "机构名称不能为空"
org = dbs.query(SysOrg).filter(SysOrg.org_name == org_name, SysOrg.parent_id == parent_id).first()
return "机构名称重复" if org else ""
def add(dbs, org):
"""
添加机构
:param dbs:
:param org:
:return:
"""
try:
dbs.add(org)
dbs.flush()
sys_user_org = SysUserOrg(user_id=org.create_user, org_id=org.id, create_user=org.create_user,
create_time=date_now())
dbs.merge(sys_user_org)
sys_user_org = SysUserOrg(user_id=1, org_id=org.id, create_user=org.create_user,
create_time=date_now())
dbs.merge(sys_user_org)
return ''
except Exception as e:
logger.error(e)
return '添加机构失败,请重试!'
def update(dbs, org):
"""
更新机构信息
:param dbs:
:param org:
:return:
"""
try:
with transaction.manager:
dbs.merge(org)
dbs.flush()
return ''
except Exception as e:
logger.error(e)
return '更新机构信息失败,请重试!'
def delete(dbs, org_id):
"""
删除机构,同时删除机构下用户、pad、用户的机构授权、用户的角色授权、其他用户对此机构的授权
:param dbs:
:param org_id:
:return:
"""
try:
with transaction.manager as tm:
children = dbs.query(SysOrg).filter(SysOrg.parent_id == org_id).all()
if children:
tm.abort()
return '请先删除此机构的子机构!'
dbs.query(HasPad).filter(HasPad.org_id == org_id).delete()
dbs.query(SysUserOrg).filter(SysUserOrg.org_id == org_id).delete()
users = dbs.query(SysUser).filter(SysUser.org_id == org_id).all()
if users:
for user in users:
dbs.query(SysUserOrg).filter(SysUserOrg.user_id == user.id).delete()
dbs.query(SysUserRole).filter(SysUserRole.user_id == user.id).delete()
dbs.delete(user)
dbs.query(SysOrg).filter(SysOrg.id == org_id).delete()
dbs.flush()
return ''
except Exception as e:
logger.error(e)
return '删除机构失败,请重试!'
def find_org_ids(dbs, user_org_id):
"""
获取当前用户所属机构及下属机构id
:param dbs:
:param user_org_id:
:return:
"""
branches = [] # 获取当前用户所属机构及下属机构id
sql = 'WITH RECURSIVE r AS ( SELECT * FROM brms.sys_org '
if user_org_id:
sql += ' WHERE id = %s' % user_org_id
else:
sql += ' WHERE id = 1'
sql += ' union ALL SELECT sys_org.* FROM brms.sys_org, r WHERE sys_org.parent_id = r.id ) ' \
'SELECT id,org_name,parent_id FROM r ORDER BY id'
orgs = dbs.execute(sql)
for rec in orgs:
branches.append(rec[0])
return branches
def find_org_by_user(dbs, user_id):
"""
:param dbs:
:param user_id:
:return:
"""
branches = [] # 获取当前用户所属机构及下属机构id
user_orgs = dbs.query(SysUserOrg.org_id).filter(SysUserOrg.user_id == user_id).all()
for rec in user_orgs:
branches.append(rec[0])
return branches
def find_parent_org(dbs, org_id):
org = dbs.query(SysOrg).filter(SysOrg.id == org_id).first()
if org.org_type == '0':
return org_id
else:
return find_parent_org(dbs, org.parent_id) | 0.236869 | 0.146728 |
import logging
from django.contrib import admin
from django.core.urlresolvers import reverse, NoReverseMatch
logger = logging.getLogger(__name__)
def patch_admin_context(request, valid, invalid):
    """Return *valid* when the request is an authenticated admin request,
    otherwise *invalid*.

    Checks, in order: the request carries a user; the user is
    authenticated; the default admin index URL reverses; and the request
    path lives under that admin prefix.  Only the default
    ``django.contrib.admin.site`` can be reversed here — a custom
    :class:`~django.contrib.admin.AdminSite` falls back to *invalid*,
    which is hopefully fine, because you should probably handle things
    yourself.

    .. versionadded:: 0.8.1
        Hoisted functionality required for
        :func:`adminlinks.context_processors.force_admin_popups`
        and :func:`adminlinks.context_processors.fix_admin_popups` into
        a separate function, which tests whether to apply the context.

    :return: ``valid`` or ``invalid`` parameter.
    :rtype: dictionary.
    """
    if not hasattr(request, 'user'):
        logger.debug("No user on request, probably don't need to fix popups")
        return invalid
    if not request.user.is_authenticated():
        logger.debug("user is anonymous; no point trying to fix popups as "
                     "they're not signed in.")
        return invalid
    try:
        url_prefix = reverse('%s:index' % admin.site.name)
    except NoReverseMatch:
        # The admin simply isn't mounted; nothing further to report (the
        # original bound the exception to an unused name).
        logger.info('admin is not mounted')
        return invalid
    if not request.path.startswith(url_prefix):
        logger.debug("Request path {path} is not within the admin "
                     "mounted under {admin}".format(path=request.path,
                                                    admin=url_prefix))
        return invalid
    return valid
def force_admin_popups(request):
    """Force the whole admin to render as if inside a popup.

    Puts ``is_popup=True`` into the context for every authenticated admin
    request (hiding header, breadcrumbs, etc.), while ``is_really_popup``
    records whether a popup was actually requested (``_popup`` in the
    request, or legacy ``pop`` in GET) and ``will_autoclose`` whether the
    view should close a modal on success (``_autoclose``).

    .. versionadded:: 0.8.1
        Previously this was known as
        :func:`adminlinks.context_processors.fix_admin_popups`, even though
        it didn't really *fix* anything.

    .. note::
        Without an authenticated user the context stays empty.
    """
    really_popup = '_popup' in request.REQUEST or 'pop' in request.GET
    will_autoclose = '_autoclose' in request.REQUEST
    valid_value = {
        'is_popup': True,
        'is_admin_view': True,
        'is_really_popup': really_popup,
        'will_autoclose': will_autoclose,
    }
    return patch_admin_context(request=request, valid=valid_value,
                               invalid={})
def fix_admin_popups(request):
    """Expose ``is_popup`` to admin templates based on the request.

    ``is_popup`` is true when the request carries ``_popup`` (any method)
    or the legacy ``pop`` GET flag used by the changelist in Django < 1.6
    — mirroring what the admin itself checks.

    .. versionchanged:: 0.8.1
        Previously the function
        :func:`adminlinks.context_processors.force_admin_popups` used this
        name.

    .. note::
        Without an authenticated user the context stays empty.
    """
    popup_requested = '_popup' in request.REQUEST or 'pop' in request.GET
    return patch_admin_context(request=request,
                               valid={'is_popup': popup_requested},
                               invalid={})
from django.contrib import admin
from django.core.urlresolvers import reverse, NoReverseMatch
logger = logging.getLogger(__name__)
def patch_admin_context(request, valid, invalid):
"""
If there is no user, or the user is not authenticated, the
context will never contain ``valid``.
If the :class:`~django.contrib.admin.AdminSite` in use isn't
the default ``django.contrib.admin.site``, it will also
fail (being unable to reverse the default admin), which is
hopefully fine, because you should probably handle things
yourself, you magical person.
.. versionadded:: 0.8.1
Hoisted functionality required for
:func:`adminlinks.context_processors.force_admin_popups`
and :func:`adminlinks.context_processors.fix_admin_popups` into
a separate function, which tests whether to apply the context.
:return: ``valid`` or ``invalid`` parameter.
:rtype: dictionary.
"""
if not hasattr(request, 'user'):
logger.debug("No user on request, probably don't need to fix popups")
return invalid
if not request.user.is_authenticated():
logger.debug("user is anonymous; no point trying to fix popups as "
"they're not signed in.")
return invalid
try:
url_prefix = reverse('%s:index' % admin.site.name)
except NoReverseMatch as e:
logger.info('admin is not mounted')
return invalid
if not request.path.startswith(url_prefix):
logger.debug("Request path {path} is not within the admin "
"mounted under {admin}".format(path=request.path,
admin=url_prefix))
return invalid
return valid
def force_admin_popups(request):
"""
Should you desire it, you can force the entire admin to behave
as if it were in a popup. This may be useful if you're exposing
the entire thing as a frontend-edited site.
It forces all of the admin to believe that the request included
`_popup=1` (or `pop=1` for the changelist in `Django_` < 1.6)
and thus hides the header, breadcrumbs etc.
It also keeps track of whether or not it was really requested
via a popup, by populating the context with ``is_really_popup``,
and it also detects whether the view is supposed to respond by
closing a modal window on success by putting ``will_autoclose``
into the context.
.. versionadded:: 0.8.1
Previously this was known as
:func:`adminlinks.context_processors.fix_admin_popups`, even though it
didn't really *fix* anything.
.. note::
If there is no user, or the user is not authenticated, the
context will never contain any of the documented keys.
"""
valid_value = {'is_popup': True, 'is_admin_view': True,
'is_really_popup': '_popup' in request.REQUEST or
'pop' in request.GET,
'will_autoclose': '_autoclose' in request.REQUEST}
invalid_value = {}
return patch_admin_context(request=request, valid=valid_value,
invalid=invalid_value)
def fix_admin_popups(request):
"""
Should you desire it, you can force the entire admin to behave
as if it were in a popup. This may be useful if you're exposing
the entire thing as a frontend-edited site.
It forces all of the admin to believe that the request included
`_popup=1` (or `pop=1` for the changelist in `Django_` < 1.6)
and thus hides the header, breadcrumbs etc.
It also keeps track of whether or not it was really requested
via a popup, by populating the context with ``is_really_popup``,
and it also detects whether the view is supposed to respond by
closing a modal window on success by putting ``will_autoclose``
into the context.
.. versionchanged:: 0.8.1
Previously the function
:func:`adminlinks.context_processors.force_admin_popups` used this
name.
.. note::
If there is no user, or the user is not authenticated, the
context will never contain any of the documented keys.
"""
valid_value = {'is_popup': '_popup' in request.REQUEST or
'pop' in request.GET}
invalid_value = {}
return patch_admin_context(request=request, valid=valid_value,
invalid=invalid_value) | 0.566858 | 0.112065 |
from __future__ import absolute_import
import hashlib
import json
import logging
from talos.common import cache
from wecube_plugins_itsdangerous.apps.processor import detector
from wecube_plugins_itsdangerous.common import reader
from wecube_plugins_itsdangerous.common import scope
from wecube_plugins_itsdangerous.db import resource
LOG = logging.getLogger(__name__)
# Thin re-exports of the db-layer resources: the subclasses add no behaviour,
# but give the processor app its own stable names for each resource.
class Policy(resource.Policy):
    pass
class Rule(resource.Rule):
    pass
class MatchParam(resource.MatchParam):
    pass
class Subject(resource.Subject):
    pass
class Target(resource.Target):
    pass
class ServiceScript(resource.ServiceScript):
    pass
class BoxManage(resource.BoxManage):
    pass
class Box(resource.Box):
    """Policy box: matches job data against subjects, then runs rule detectors."""

    def _get_rules(self, data, boxes=None):
        """Collect the enabled rules of every box whose subject matches *data*.

        A subject matches when any of its enabled targets accepts the data
        (args scope and, if configured, entity scope).  Target decisions are
        cached per (target, data-digest) for 30 seconds.

        :param data: job payload (see :meth:`check` for the shape)
        :param boxes: optional pre-fetched box list; defaults to all boxes
            with enabled policy and subject
        :return: de-duplicated list of enabled rule dicts
        """
        boxes = boxes or self.list(filters={'policy.enabled': 1, 'subject.enabled': 1})
        rules = {}
        # Digest of the payload keys the per-target decision cache.
        hasher = hashlib.sha256()
        hasher.update(json.dumps(data).encode('utf-8'))
        digest = hasher.hexdigest()
        LOG.debug('scope test with data - %s ...', str(data)[:4096])
        for b in boxes:
            LOG.debug('scope test of box[%s - %s]', b['id'], b['name'])
            subject_included = False
            for target in b['subject']['targets']:
                target_included = True
                # target with the same data is cached
                key = 'scope/target/%s/data/%s' % (target['id'], digest)
                cached = cache.get(key, 30)
                if cache.validate(cached):
                    target_included = cached
                    LOG.debug('scope test of target[%s - %s]: %s', target['id'], target['name'], ('accepted' if cached else 'rejected'))
                else:
                    LOG.debug('scope test of target[%s - %s]', target['id'], target['name'])
                    if target['enabled']:
                        # Args scope first; a None scope always accepts.
                        if target['args_scope'] is not None:
                            target_included = scope.JsonScope(target['args_scope']).is_match(data)
                        else:
                            target_included = True
                        if target_included:
                            LOG.debug('args scope: accepted')
                            # Entity scope only consulted when args scope passed.
                            if target['entity_scope'] is not None:
                                target_included = scope.WeCMDBScope(target['entity_scope']).is_match(
                                    data['entityInstances'])
                            else:
                                target_included = True
                            if target_included:
                                LOG.debug('entity scope: accepted')
                            else:
                                LOG.debug('entity scope: rejected')
                        else:
                            LOG.debug('args scope: rejected')
                    else:
                        LOG.debug('target: disabled')
                        target_included = False
                    cache.set(key, target_included)
                if target_included:
                    # One accepting target is enough for the subject.
                    subject_included = True
                    break
            if subject_included:
                # extend box rules(enabled); keyed by id to de-duplicate.
                for rule in b['policy']['rules']:
                    if rule['enabled']:
                        rules[rule['id']] = rule
                LOG.debug('scope test of box[%s - %s]: accepted, rules: %s', b['id'], b['name'], list(rules.keys()))
            else:
                LOG.debug('scope test of box[%s - %s]: rejected', b['id'], b['name'])
        return list(rules.values())

    def _rule_grouping(self, rules):
        """Group rules by match_type: {'filter': [r1], 'cli': [r2], ...}."""
        grouped = {}
        for rule in rules:
            grouped.setdefault(rule['match_type'], []).append(rule)
        return grouped

    def check(self, data, boxes=None):
        '''
        Run every matching detector over every script in the payload.

        data: {
            (Optional - JsonScope check)"serviceName": "xxx",
            (Optional - JsonScope check)"inputParams": {...service input params},
            (Must - script check)"scripts": [{"type": None/"sql"/"shell", "content": "...", "name": "additional name info"}],
            (Must - WeCMDBScope check)"entityInstances": [{"guid": "xxx"}, {...}]}

        :return: list of detector findings, each tagged with 'script_name'
        '''
        results = []
        scripts = data['scripts']
        # Rule matching depends only on *data*, never on the individual
        # script, so compute the grouped rule set once instead of once per
        # script (the original recomputed it every loop iteration).
        grouped_rules = self._rule_grouping(self._get_rules(data, boxes=boxes))
        for item in scripts:
            script_name = item.get('name', '') or ''
            script_content = item.get('content', '') or ''
            script_type = item.get('type', None)
            for key, values in grouped_rules.items():
                script_results = []
                # Lazily guess the script type only when rules exist.
                if not script_type:
                    script_type = reader.guess(script_content) or 'text'
                if key == 'filter':
                    script_results = detector.JsonFilterDetector(data, values).check()
                elif key == 'cli' and script_type == 'shell':
                    script_results = detector.BashCliDetector(script_content, values).check()
                elif key == 'sql' and script_type == 'sql':
                    script_results = detector.SqlDetector(script_content, values).check()
                elif key == 'text':
                    script_results = detector.LineTextDetector(script_content, values).check()
                elif key == 'fulltext':
                    script_results = detector.FullTextDetector(script_content, values).check()
                for r in script_results:
                    r['script_name'] = script_name
                results.extend(script_results)
        return results
from __future__ import absolute_import
import hashlib
import json
import logging
from talos.common import cache
from wecube_plugins_itsdangerous.apps.processor import detector
from wecube_plugins_itsdangerous.common import reader
from wecube_plugins_itsdangerous.common import scope
from wecube_plugins_itsdangerous.db import resource
LOG = logging.getLogger(__name__)
class Policy(resource.Policy):
    """API-layer alias of the ``resource.Policy`` DB resource."""
    pass
class Rule(resource.Rule):
    """API-layer alias of the ``resource.Rule`` DB resource."""
    pass
class MatchParam(resource.MatchParam):
    """API-layer alias of the ``resource.MatchParam`` DB resource."""
    pass
class Subject(resource.Subject):
    """API-layer alias of the ``resource.Subject`` DB resource."""
    pass
class Target(resource.Target):
    """API-layer alias of the ``resource.Target`` DB resource."""
    pass
class ServiceScript(resource.ServiceScript):
    """API-layer alias of the ``resource.ServiceScript`` DB resource."""
    pass
class BoxManage(resource.BoxManage):
    """API-layer alias of the ``resource.BoxManage`` DB resource."""
    pass
class Box(resource.Box):
    """Risk-check entry point: selects the rules whose box scope accepts the
    input data and runs them through the type-specific detectors."""

    def _get_rules(self, data, boxes=None):
        """Return the enabled, deduplicated rules of every enabled box whose
        subject scope matches *data*.

        A subject matches if ANY of its enabled targets matches (args scope
        AND entity scope; a NULL scope counts as "matches everything").
        Per-target scope results are cached for 30s, keyed by the target id
        and a sha256 digest of *data*.
        """
        boxes = boxes or self.list(filters={'policy.enabled': 1, 'subject.enabled': 1})
        rules = {}
        hasher = hashlib.sha256()
        # NOTE(review): json.dumps without sort_keys=True means logically
        # equal dicts built in different key orders get different cache keys
        # (extra cache misses only; results stay correct).
        hasher.update(json.dumps(data).encode('utf-8'))
        digest = hasher.hexdigest()
        LOG.debug('scope test with data - %s ...', str(data)[:4096])
        for b in boxes:
            LOG.debug('scope test of box[%s - %s]', b['id'], b['name'])
            subject_included = False
            for target in b['subject']['targets']:
                target_included = True
                # target with the same data is cached
                key = 'scope/target/%s/data/%s' % (target['id'], digest)
                cached = cache.get(key, 30)
                if cache.validate(cached):
                    target_included = cached
                    LOG.debug('scope test of target[%s - %s]: %s', target['id'], target['name'], ('accepted' if cached else 'rejected'))
                else:
                    LOG.debug('scope test of target[%s - %s]', target['id'], target['name'])
                    if target['enabled']:
                        # args scope first, then entity scope
                        if target['args_scope'] is not None:
                            target_included = scope.JsonScope(target['args_scope']).is_match(data)
                        else:
                            target_included = True
                        if target_included:
                            LOG.debug('args scope: accepted')
                            if target['entity_scope'] is not None:
                                target_included = scope.WeCMDBScope(target['entity_scope']).is_match(
                                    data['entityInstances'])
                            else:
                                target_included = True
                            if target_included:
                                LOG.debug('entity scope: accepted')
                            else:
                                LOG.debug('entity scope: rejected')
                        else:
                            LOG.debug('args scope: rejected')
                    else:
                        LOG.debug('target: disabled')
                        target_included = False
                    cache.set(key, target_included)
                if target_included:
                    subject_included = True
                    break
            if subject_included:
                # extend box rules(enabled); keyed by id so duplicates collapse
                for rule in b['policy']['rules']:
                    if rule['enabled']:
                        rules[rule['id']] = rule
                LOG.debug('scope test of box[%s - %s]: accepted, rules: %s', b['id'], b['name'], list(rules.keys()))
            else:
                LOG.debug('scope test of box[%s - %s]: rejected', b['id'], b['name'])
        return list(rules.values())

    def _rule_grouping(self, rules):
        """Bucket *rules* by match_type, e.g. {'filter': [r1, r2], 'cli': [r3]}."""
        results = {}
        for r in rules:
            results.setdefault(r['match_type'], []).append(r)
        return results

    def check(self, data, boxes=None):
        '''
        data: {
            (Optional - JsonScope check)"serviceName": "xxx",
            (Optional - JsonScope check)"inputParams": {...service input params},
            (Must - script check)"scripts": [{"type": None/"sql"/"shell", "content": "...", "name": "additional name info"}],
            (Must - WeCMDBScope check)"entityInstances": [{"guid": "xxx"}, {...}]}
        '''
        results = []
        # PERF FIX: the applicable rules depend only on *data*, not on the
        # individual script, so resolve and group them once instead of once
        # per script (previously recomputed inside the loop).
        rules = self._rule_grouping(self._get_rules(data, boxes=boxes))
        for item in data['scripts']:
            script_name = item.get('name', '') or ''
            script_content = item.get('content', '') or ''
            script_type = item.get('type', None)
            if not script_type:
                # sniff the content type; fall back to plain text
                script_type = reader.guess(script_content) or 'text'
            for key, values in rules.items():
                script_results = []
                if key == 'filter':
                    script_results = detector.JsonFilterDetector(data, values).check()
                elif key == 'cli' and script_type == 'shell':
                    script_results = detector.BashCliDetector(script_content, values).check()
                elif key == 'sql' and script_type == 'sql':
                    script_results = detector.SqlDetector(script_content, values).check()
                elif key == 'text':
                    script_results = detector.LineTextDetector(script_content, values).check()
                elif key == 'fulltext':
                    script_results = detector.FullTextDetector(script_content, values).check()
                # tag each finding with the script it came from
                for r in script_results:
                    r['script_name'] = script_name
                results.extend(script_results)
        return results
import torch
from torch import nn
import torch.nn.functional as F
from .build import BACKBONE_REGISTRY
from .backbone import Backbone
from detectron2.modeling import ShapeSpec
class ConvolutionalLayer(nn.Module):
    """Conv2d -> BatchNorm2d -> LeakyReLU(0.1) building block."""

    def __init__(self, in_channels, out_channels, kernal_size, stride, padding):
        super().__init__()
        layers = [
            nn.Conv2d(in_channels, out_channels, kernal_size, stride, padding),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU(0.1),
        ]
        # keep the attribute name 'conv' so existing state dicts still load
        self.conv = nn.Sequential(*layers)

    def forward(self, inputs):
        return self.conv(inputs)
class ResidualLayer(nn.Module):
    """Bottleneck residual block: x + Conv1x1(C -> C/2) -> Conv3x3(C/2 -> C)."""

    def __init__(self, in_channels):
        super().__init__()
        mid = in_channels // 2
        # keep the attribute name 'reseblock' so existing state dicts still load
        self.reseblock = nn.Sequential(
            ConvolutionalLayer(in_channels, mid, kernal_size=1, stride=1, padding=0),
            ConvolutionalLayer(mid, in_channels, kernal_size=3, stride=1, padding=1),
        )

    def forward(self, x):
        return self.reseblock(x) + x
class DownSampleLayer(nn.Module):
    """Halve the spatial resolution with a stride-2 3x3 ConvolutionalLayer."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # keep the attribute name 'conv' so existing state dicts still load
        self.conv = nn.Sequential(
            ConvolutionalLayer(in_channels, out_channels, kernal_size=3, stride=2, padding=1),
        )

    def forward(self, feature_map):
        return self.conv(feature_map)
class UpSampleLayer(nn.Module):
    """2x nearest-neighbour spatial upsampling (no learnable parameters)."""

    def forward(self, x):
        return F.interpolate(x, scale_factor=2, mode='nearest')
class ConvolutionalSetLayer(nn.Module):
    """Five alternating 1x1/3x3 convolutions (the YOLOv3 'convolutional set')."""

    def __init__(self, in_channel, out_channel):
        super().__init__()
        # squeeze (1x1) / expand (3x3) pairs, ending on a squeeze;
        # attribute name 'conv' kept so existing state dicts still load
        self.conv = nn.Sequential(
            ConvolutionalLayer(in_channel, out_channel, kernal_size=1, stride=1, padding=0),
            ConvolutionalLayer(out_channel, in_channel, kernal_size=3, stride=1, padding=1),
            ConvolutionalLayer(in_channel, out_channel, kernal_size=1, stride=1, padding=0),
            ConvolutionalLayer(out_channel, in_channel, kernal_size=3, stride=1, padding=1),
            ConvolutionalLayer(in_channel, out_channel, kernal_size=1, stride=1, padding=0),
        )

    def forward(self, x):
        return self.conv(x)
class DarkNet53(Backbone):
    """DarkNet-53 (YOLOv3 backbone) exposing detectron2-style feature maps.

    forward() returns {'res2': 128ch/stride4, 'res3': 256ch/stride8,
    'res4': 512ch/stride16, 'res5': 1024ch/stride32}.
    """

    def __init__(self):
        super(DarkNet53, self).__init__()
        # stem + stages down to stride 8; indices 5 and 14 of this Sequential
        # are tapped in forward() as 'res2' and 'res3'
        self.feature_52 = nn.Sequential(
            ConvolutionalLayer(3, 32, 3, 1, 1),
            DownSampleLayer(32, 64),
            ResidualLayer(64),
            DownSampleLayer(64, 128),
            ResidualLayer(128),
            ResidualLayer(128),
            DownSampleLayer(128, 256),
            *[ResidualLayer(256) for _ in range(8)],
        )
        self.feature_26 = nn.Sequential(
            DownSampleLayer(256, 512),
            *[ResidualLayer(512) for _ in range(8)],
        )
        self.feature_13 = nn.Sequential(
            DownSampleLayer(512, 1024),
            *[ResidualLayer(1024) for _ in range(4)],
        )
        # NOTE(review): the heads below are defined but never used in
        # forward(); presumably kept for YOLO-head checkpoint compatibility —
        # confirm before removing.
        self.convolset_13 = nn.Sequential(ConvolutionalSetLayer(1024, 512))
        self.convolset_26 = nn.Sequential(ConvolutionalSetLayer(768, 256))
        self.convolset_52 = nn.Sequential(ConvolutionalSetLayer(384, 128))
        self.detection_13 = nn.Sequential(
            ConvolutionalLayer(512, 1024, 3, 1, 1),
            nn.Conv2d(1024, 15, 1, 1, 0)
        )
        self.detection_26 = nn.Sequential(
            ConvolutionalLayer(256, 512, 3, 1, 1),
            nn.Conv2d(512, 15, 1, 1, 0)
        )
        self.detection_52 = nn.Sequential(
            ConvolutionalLayer(128, 256, 3, 1, 1),
            nn.Conv2d(256, 15, 1, 1, 0)
        )
        self.up_26 = nn.Sequential(
            ConvolutionalLayer(512, 256, 1, 1, 0),
            UpSampleLayer()
        )
        self.up_52 = nn.Sequential(
            ConvolutionalLayer(256, 128, 1, 1, 0),
            UpSampleLayer()
        )

    def forward(self, x):
        """Run the backbone and collect the four feature maps."""
        outputs = {}
        # FIX: iterate the Sequential directly instead of poking at the
        # private ._modules dict with stringified indices.
        taps = {5: 'res2', 14: 'res3'}
        for idx, module in enumerate(self.feature_52):
            x = module(x)
            if idx in taps:
                outputs[taps[idx]] = x
        h_26 = self.feature_26(x)
        outputs['res4'] = h_26
        h_13 = self.feature_13(h_26)
        outputs['res5'] = h_13
        return outputs

    def output_shape(self):
        """Channel/stride metadata for each feature map returned by forward()."""
        return {'res2': ShapeSpec(channels=128, stride=4),
                'res3': ShapeSpec(channels=256, stride=8),
                'res4': ShapeSpec(channels=512, stride=16),
                'res5': ShapeSpec(channels=1024, stride=32)}
@BACKBONE_REGISTRY.register()
def build_darknet53_backbone(cfg, input_shape):
    """detectron2 backbone factory; *cfg* and *input_shape* are unused here."""
    return DarkNet53()
if __name__ == '__main__':
    # smoke test: print a layer-by-layer summary of the backbone
    net = DarkNet53()
    from torchsummary import summary
    summary(net, (3, 224, 224))
pass | list/darknet53.py | import torch
from torch import nn
import torch.nn.functional as F
from .build import BACKBONE_REGISTRY
from .backbone import Backbone
from detectron2.modeling import ShapeSpec
class ConvolutionalLayer(nn.Module):
    """Conv2d -> BatchNorm2d -> LeakyReLU(0.1) building block."""

    def __init__(self, in_channels, out_channels, kernal_size, stride, padding):
        super().__init__()
        layers = [
            nn.Conv2d(in_channels, out_channels, kernal_size, stride, padding),
            nn.BatchNorm2d(out_channels),
            nn.LeakyReLU(0.1),
        ]
        # keep the attribute name 'conv' so existing state dicts still load
        self.conv = nn.Sequential(*layers)

    def forward(self, inputs):
        return self.conv(inputs)
class ResidualLayer(nn.Module):
    """Bottleneck residual block: x + Conv1x1(C -> C/2) -> Conv3x3(C/2 -> C)."""

    def __init__(self, in_channels):
        super().__init__()
        mid = in_channels // 2
        # keep the attribute name 'reseblock' so existing state dicts still load
        self.reseblock = nn.Sequential(
            ConvolutionalLayer(in_channels, mid, kernal_size=1, stride=1, padding=0),
            ConvolutionalLayer(mid, in_channels, kernal_size=3, stride=1, padding=1),
        )

    def forward(self, x):
        return self.reseblock(x) + x
class DownSampleLayer(nn.Module):
    """Halve the spatial resolution with a stride-2 3x3 ConvolutionalLayer."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        # keep the attribute name 'conv' so existing state dicts still load
        self.conv = nn.Sequential(
            ConvolutionalLayer(in_channels, out_channels, kernal_size=3, stride=2, padding=1),
        )

    def forward(self, feature_map):
        return self.conv(feature_map)
class UpSampleLayer(nn.Module):
    """2x nearest-neighbour spatial upsampling (no learnable parameters)."""

    def forward(self, x):
        return F.interpolate(x, scale_factor=2, mode='nearest')
class ConvolutionalSetLayer(nn.Module):
    """Five alternating 1x1/3x3 convolutions (the YOLOv3 'convolutional set')."""

    def __init__(self, in_channel, out_channel):
        super().__init__()
        # squeeze (1x1) / expand (3x3) pairs, ending on a squeeze;
        # attribute name 'conv' kept so existing state dicts still load
        self.conv = nn.Sequential(
            ConvolutionalLayer(in_channel, out_channel, kernal_size=1, stride=1, padding=0),
            ConvolutionalLayer(out_channel, in_channel, kernal_size=3, stride=1, padding=1),
            ConvolutionalLayer(in_channel, out_channel, kernal_size=1, stride=1, padding=0),
            ConvolutionalLayer(out_channel, in_channel, kernal_size=3, stride=1, padding=1),
            ConvolutionalLayer(in_channel, out_channel, kernal_size=1, stride=1, padding=0),
        )

    def forward(self, x):
        return self.conv(x)
class DarkNet53(Backbone):
    """DarkNet-53 (YOLOv3 backbone) exposing detectron2-style feature maps.

    forward() returns {'res2': 128ch/stride4, 'res3': 256ch/stride8,
    'res4': 512ch/stride16, 'res5': 1024ch/stride32}.
    """

    def __init__(self):
        super(DarkNet53, self).__init__()
        # stem + stages down to stride 8; indices 5 and 14 of this Sequential
        # are tapped in forward() as 'res2' and 'res3'
        self.feature_52 = nn.Sequential(
            ConvolutionalLayer(3, 32, 3, 1, 1),
            DownSampleLayer(32, 64),
            ResidualLayer(64),
            DownSampleLayer(64, 128),
            ResidualLayer(128),
            ResidualLayer(128),
            DownSampleLayer(128, 256),
            *[ResidualLayer(256) for _ in range(8)],
        )
        self.feature_26 = nn.Sequential(
            DownSampleLayer(256, 512),
            *[ResidualLayer(512) for _ in range(8)],
        )
        self.feature_13 = nn.Sequential(
            DownSampleLayer(512, 1024),
            *[ResidualLayer(1024) for _ in range(4)],
        )
        # NOTE(review): the heads below are defined but never used in
        # forward(); presumably kept for YOLO-head checkpoint compatibility —
        # confirm before removing.
        self.convolset_13 = nn.Sequential(ConvolutionalSetLayer(1024, 512))
        self.convolset_26 = nn.Sequential(ConvolutionalSetLayer(768, 256))
        self.convolset_52 = nn.Sequential(ConvolutionalSetLayer(384, 128))
        self.detection_13 = nn.Sequential(
            ConvolutionalLayer(512, 1024, 3, 1, 1),
            nn.Conv2d(1024, 15, 1, 1, 0)
        )
        self.detection_26 = nn.Sequential(
            ConvolutionalLayer(256, 512, 3, 1, 1),
            nn.Conv2d(512, 15, 1, 1, 0)
        )
        self.detection_52 = nn.Sequential(
            ConvolutionalLayer(128, 256, 3, 1, 1),
            nn.Conv2d(256, 15, 1, 1, 0)
        )
        self.up_26 = nn.Sequential(
            ConvolutionalLayer(512, 256, 1, 1, 0),
            UpSampleLayer()
        )
        self.up_52 = nn.Sequential(
            ConvolutionalLayer(256, 128, 1, 1, 0),
            UpSampleLayer()
        )

    def forward(self, x):
        """Run the backbone and collect the four feature maps."""
        outputs = {}
        # FIX: iterate the Sequential directly instead of poking at the
        # private ._modules dict with stringified indices.
        taps = {5: 'res2', 14: 'res3'}
        for idx, module in enumerate(self.feature_52):
            x = module(x)
            if idx in taps:
                outputs[taps[idx]] = x
        h_26 = self.feature_26(x)
        outputs['res4'] = h_26
        h_13 = self.feature_13(h_26)
        outputs['res5'] = h_13
        return outputs

    def output_shape(self):
        """Channel/stride metadata for each feature map returned by forward()."""
        return {'res2': ShapeSpec(channels=128, stride=4),
                'res3': ShapeSpec(channels=256, stride=8),
                'res4': ShapeSpec(channels=512, stride=16),
                'res5': ShapeSpec(channels=1024, stride=32)}
@BACKBONE_REGISTRY.register()
def build_darknet53_backbone(cfg, input_shape):
    """detectron2 backbone factory; *cfg* and *input_shape* are unused here."""
    return DarkNet53()
if __name__ == '__main__':
    # smoke test: print a layer-by-layer summary of the backbone
    net = DarkNet53()
    from torchsummary import summary
    summary(net, (3, 224, 224))
pass | 0.922657 | 0.300861 |
import argparse
import pickle
from tqdm import tqdm
import numpy as np
import tensorflow as tf
import datasets
import hierarchical_vae
import utils
def main():
    """Restore a trained hierarchical VAE from a checkpoint and report the
    log p(x) gap (IWHVI bound, for several K) and KL(tau||q) diagnostics on
    the chosen data split."""
    datasets_available = [f[4:] for f in dir(datasets) if f.startswith('get_') and callable(getattr(datasets, f))]
    argparser = argparse.ArgumentParser()
    argparser.add_argument('model_weights_path')
    argparser.add_argument('--max_test_batch_size', type=int, default=1)
    argparser.add_argument('--test_iwae_samples', type=int, default=5000)
    argparser.add_argument('--test_iwae_batch_size', type=int, default=None)
    argparser.add_argument('--test_iwhvi_samples', type=int, nargs='+', default=[0, 1, 10, 25, 50, 100, 200])
    argparser.add_argument('--diagnostic_kl_batch_size', type=int, default=10)
    argparser.add_argument('--evaluate_split', choices=['train', 'val', 'test'], default='test')
    argparser.add_argument('--dataset', choices=datasets_available, default='dynamic_mnist')
    argparser.add_argument('--datasets_dir', default='./datasets/')
    # adds the model hyper-parameters (and, presumably, --n_repeats used below)
    hierarchical_vae.utils.add_model_args(argparser)
    args = argparser.parse_args()
    dataset = getattr(datasets, 'get_%s' % args.dataset)(args.datasets_dir)
    sess = tf.InteractiveSession()
    print('Arguments:')  # FIX: was misspelled "Aguments"
    for param_name, param_value in sorted(vars(args).items()):
        print('--{:30}: {}'.format(param_name, param_value))
    print('\n')
    vae = hierarchical_vae.utils.get_model(args)
    sess.run(tf.global_variables_initializer())
    restorer = tf.train.Saver()
    restorer.restore(sess, args.model_weights_path)
    # Evaluation
    data = {
        'train': dataset.train,
        'test': dataset.test,
        'val': dataset.validation,
    }[args.evaluate_split]
    p_gaps = {}
    for iwhvi_samples in args.test_iwhvi_samples:
        # keep (K+1) * batch <= max_test_batch_size, at least 1
        x_batch_size = args.max_test_batch_size // (iwhvi_samples + 1) + 1
        print('Evaluating evidence, KLs and q gap with {} on {} with M={}, K={}, batch_size={}'.format(
            'IWHVI', args.evaluate_split, args.test_iwae_samples, iwhvi_samples, x_batch_size))
        # BUG FIX: the per-K result used to be assigned to "p_gaps" itself,
        # clobbering the accumulator dict and losing every earlier K's gaps
        # (and making p_gaps[k] a self-reference).
        gaps = hierarchical_vae.utils.batched_calculate_p_gap(
            sess, data, vae, args.test_iwae_samples, iwhvi_samples, x_batch_size,
            args.n_repeats, False, args.test_iwae_batch_size, tqdm_desc='Calculating log p(x) gap')
        p_gaps[iwhvi_samples] = gaps
    print('*' * 80)
    print('* {} log q(z) gap (using {} bound)'.format(args.evaluate_split, 'IWHVI'))
    for k in args.test_iwhvi_samples:
        print('* k = {:4} * log q(z) gap is {:.5f} (std.: {:.5f})'.format(k, np.mean(p_gaps[k]), np.std(p_gaps[k])))
    print('*' * 80)
    print()
    print('Evaluating KL(tau||q) on {} with M={}'.format(args.evaluate_split, args.test_iwae_samples))
    kls_tau_q = hierarchical_vae.utils.calculate_kl_tau_q(
        sess, data, vae, args.test_iwae_samples, args.diagnostic_kl_batch_size, args.n_repeats,
        tqdm_desc='Calculating KL(tau(psi)||q(psi))')
    print('* The final KL(tau(psi)||q(psi)) on {}: {:.5f} (std.: {:.5f})'.format(
        args.evaluate_split, np.mean(kls_tau_q), np.std(kls_tau_q)))
    print('*' * 80)
if __name__ == "__main__":
main() | iwhvae_kl.py | import argparse
import pickle
from tqdm import tqdm
import numpy as np
import tensorflow as tf
import datasets
import hierarchical_vae
import utils
def main():
    """Restore a trained hierarchical VAE from a checkpoint and report the
    log p(x) gap (IWHVI bound, for several K) and KL(tau||q) diagnostics on
    the chosen data split."""
    datasets_available = [f[4:] for f in dir(datasets) if f.startswith('get_') and callable(getattr(datasets, f))]
    argparser = argparse.ArgumentParser()
    argparser.add_argument('model_weights_path')
    argparser.add_argument('--max_test_batch_size', type=int, default=1)
    argparser.add_argument('--test_iwae_samples', type=int, default=5000)
    argparser.add_argument('--test_iwae_batch_size', type=int, default=None)
    argparser.add_argument('--test_iwhvi_samples', type=int, nargs='+', default=[0, 1, 10, 25, 50, 100, 200])
    argparser.add_argument('--diagnostic_kl_batch_size', type=int, default=10)
    argparser.add_argument('--evaluate_split', choices=['train', 'val', 'test'], default='test')
    argparser.add_argument('--dataset', choices=datasets_available, default='dynamic_mnist')
    argparser.add_argument('--datasets_dir', default='./datasets/')
    # adds the model hyper-parameters (and, presumably, --n_repeats used below)
    hierarchical_vae.utils.add_model_args(argparser)
    args = argparser.parse_args()
    dataset = getattr(datasets, 'get_%s' % args.dataset)(args.datasets_dir)
    sess = tf.InteractiveSession()
    print('Arguments:')  # FIX: was misspelled "Aguments"
    for param_name, param_value in sorted(vars(args).items()):
        print('--{:30}: {}'.format(param_name, param_value))
    print('\n')
    vae = hierarchical_vae.utils.get_model(args)
    sess.run(tf.global_variables_initializer())
    restorer = tf.train.Saver()
    restorer.restore(sess, args.model_weights_path)
    # Evaluation
    data = {
        'train': dataset.train,
        'test': dataset.test,
        'val': dataset.validation,
    }[args.evaluate_split]
    p_gaps = {}
    for iwhvi_samples in args.test_iwhvi_samples:
        # keep (K+1) * batch <= max_test_batch_size, at least 1
        x_batch_size = args.max_test_batch_size // (iwhvi_samples + 1) + 1
        print('Evaluating evidence, KLs and q gap with {} on {} with M={}, K={}, batch_size={}'.format(
            'IWHVI', args.evaluate_split, args.test_iwae_samples, iwhvi_samples, x_batch_size))
        # BUG FIX: the per-K result used to be assigned to "p_gaps" itself,
        # clobbering the accumulator dict and losing every earlier K's gaps
        # (and making p_gaps[k] a self-reference).
        gaps = hierarchical_vae.utils.batched_calculate_p_gap(
            sess, data, vae, args.test_iwae_samples, iwhvi_samples, x_batch_size,
            args.n_repeats, False, args.test_iwae_batch_size, tqdm_desc='Calculating log p(x) gap')
        p_gaps[iwhvi_samples] = gaps
    print('*' * 80)
    print('* {} log q(z) gap (using {} bound)'.format(args.evaluate_split, 'IWHVI'))
    for k in args.test_iwhvi_samples:
        print('* k = {:4} * log q(z) gap is {:.5f} (std.: {:.5f})'.format(k, np.mean(p_gaps[k]), np.std(p_gaps[k])))
    print('*' * 80)
    print()
    print('Evaluating KL(tau||q) on {} with M={}'.format(args.evaluate_split, args.test_iwae_samples))
    kls_tau_q = hierarchical_vae.utils.calculate_kl_tau_q(
        sess, data, vae, args.test_iwae_samples, args.diagnostic_kl_batch_size, args.n_repeats,
        tqdm_desc='Calculating KL(tau(psi)||q(psi))')
    print('* The final KL(tau(psi)||q(psi)) on {}: {:.5f} (std.: {:.5f})'.format(
        args.evaluate_split, np.mean(kls_tau_q), np.std(kls_tau_q)))
    print('*' * 80)
if __name__ == "__main__":
main() | 0.557364 | 0.255505 |
import numpy as np
import vigra
from lazyflow.operators.generic import OpMultiArrayStacker
from tsdl.tools import Operator, InputSlot, OutputSlot
from tsdl.tools import build_operator
class OpSimpleCombiner(Operator):
    """
    combines a list of feature operators into one (horizontally)
    operators must have slots Input and Output
    """
    Input = InputSlot()
    Output = OutputSlot()
    # per-time-step flag: 1 where every sub-operator reports valid output
    Valid = OutputSlot()
    @classmethod
    def build(cls, config, parent=None, graph=None, workingdir=None):
        """
        config["operators"] = <tuple of operator classes or config dicts>
        """
        to_combine = config["operators"]
        operator = cls(to_combine, parent=parent, graph=graph)
        return operator
    def __init__(self, to_combine, *args, **kwargs):
        super(OpSimpleCombiner, self).__init__(*args, **kwargs)
        # instantiate all sub-operators; each sees the same Input and their
        # outputs are stacked along the channel axis
        operators = [build_operator(item, parent=self) for item in to_combine]
        combiner = OpMultiArrayStacker(parent=self)
        combiner.AxisFlag.setValue('c')
        combiner.Images.resize(len(operators))
        for index, operator in enumerate(operators):
            combiner.Images[index].connect(operator.Output)
            operator.Input.connect(self.Input)
        # stack the Valid flags of those sub-operators that provide one
        valid_combiner = OpMultiArrayStacker(parent=self)
        valid_combiner.AxisFlag.setValue('c')
        valid_operators = [op for op in operators if hasattr(op, "Valid")]
        valid_combiner.Images.resize(len(valid_operators))
        for index, operator in enumerate(valid_operators):
            valid_combiner.Images[index].connect(operator.Valid)
        self._combiner = combiner
        self._valid_combiner = valid_combiner
        self._operators = operators
        self.Output.connect(combiner.Output)
    def setupOutputs(self):
        # Valid carries one flag per time step of the (shared) input
        size = self._operators[0].Input.meta.shape[0]
        self.Valid.meta.shape = (size,)
        self.Valid.meta.axistags = vigra.defaultAxistags('t')
        self.Valid.meta.dtype = np.uint8
    def execute(self, slot, subindex, roi, result):
        # only Valid is computed here; Output is wired through the stacker
        assert slot is self.Valid
        start = roi.start[0]
        stop = roi.stop[0]
        valid = self._valid_combiner.Output[start:stop, :].wait()
        # a time step is valid iff all sub-operators flag it valid
        result[:] = np.all(valid, axis=1)
    def propagateDirty(self, slot, subindex, roi):
        # Output is propagated internally, Valid should be static
        pass
class OpChain(Operator):
    """
    chains a list of feature operators (vertically)
    operators must have slots Input and Output
    """
    Input = InputSlot()
    Output = OutputSlot()
    # per-time-step flag: 1 where every chained operator reports valid output
    Valid = OutputSlot()
    @classmethod
    def build(cls, config, parent=None, graph=None, workingdir=None):
        """
        config["operators"] = <tuple of operator classes or config dicts>
        """
        to_combine = config["operators"]
        operator = cls(to_combine, parent=parent, graph=graph)
        return operator
    def __init__(self, to_combine, *args, **kwargs):
        super(OpChain, self).__init__(*args, **kwargs)
        # wire the operators in sequence: each consumes the previous
        # operator's Output (the first consumes self.Input)
        next_slot = self.Input
        operators = [build_operator(item, parent=self) for item in to_combine]
        for operator in operators:
            operator.Input.connect(next_slot)
            next_slot = operator.Output
        # stack the Valid flags of those operators that provide one
        valid_combiner = OpMultiArrayStacker(parent=self)
        valid_combiner.AxisFlag.setValue('c')
        valid_operators = [op for op in operators if hasattr(op, "Valid")]
        valid_combiner.Images.resize(len(valid_operators))
        for index, operator in enumerate(valid_operators):
            valid_combiner.Images[index].connect(operator.Valid)
        self.Output.connect(next_slot)
        self._operators = operators
        self._valid_combiner = valid_combiner
    def setupOutputs(self):
        # Valid carries one flag per time step of the chain's input
        size = self._operators[0].Input.meta.shape[0]
        self.Valid.meta.shape = (size,)
        self.Valid.meta.axistags = vigra.defaultAxistags('t')
        self.Valid.meta.dtype = np.uint8
    def execute(self, slot, subindex, roi, result):
        # only Valid is computed here; Output is wired through the chain
        assert slot is self.Valid
        start = roi.start[0]
        stop = roi.stop[0]
        valid = self._valid_combiner.Output[start:stop, :].wait()
        # a time step is valid iff all chained operators flag it valid
        result[:] = np.all(valid, axis=1)
    def propagateDirty(self, slot, subindex, roi):
        # Output is propagated internally, Valid should be static
        pass | tsdl/features/combiners.py | import numpy as np
import vigra
from lazyflow.operators.generic import OpMultiArrayStacker
from tsdl.tools import Operator, InputSlot, OutputSlot
from tsdl.tools import build_operator
class OpSimpleCombiner(Operator):
    """
    combines a list of feature operators into one (horizontally)
    operators must have slots Input and Output
    """
    Input = InputSlot()
    Output = OutputSlot()
    # per-time-step flag: 1 where every sub-operator reports valid output
    Valid = OutputSlot()
    @classmethod
    def build(cls, config, parent=None, graph=None, workingdir=None):
        """
        config["operators"] = <tuple of operator classes or config dicts>
        """
        to_combine = config["operators"]
        operator = cls(to_combine, parent=parent, graph=graph)
        return operator
    def __init__(self, to_combine, *args, **kwargs):
        super(OpSimpleCombiner, self).__init__(*args, **kwargs)
        # instantiate all sub-operators; each sees the same Input and their
        # outputs are stacked along the channel axis
        operators = [build_operator(item, parent=self) for item in to_combine]
        combiner = OpMultiArrayStacker(parent=self)
        combiner.AxisFlag.setValue('c')
        combiner.Images.resize(len(operators))
        for index, operator in enumerate(operators):
            combiner.Images[index].connect(operator.Output)
            operator.Input.connect(self.Input)
        # stack the Valid flags of those sub-operators that provide one
        valid_combiner = OpMultiArrayStacker(parent=self)
        valid_combiner.AxisFlag.setValue('c')
        valid_operators = [op for op in operators if hasattr(op, "Valid")]
        valid_combiner.Images.resize(len(valid_operators))
        for index, operator in enumerate(valid_operators):
            valid_combiner.Images[index].connect(operator.Valid)
        self._combiner = combiner
        self._valid_combiner = valid_combiner
        self._operators = operators
        self.Output.connect(combiner.Output)
    def setupOutputs(self):
        # Valid carries one flag per time step of the (shared) input
        size = self._operators[0].Input.meta.shape[0]
        self.Valid.meta.shape = (size,)
        self.Valid.meta.axistags = vigra.defaultAxistags('t')
        self.Valid.meta.dtype = np.uint8
    def execute(self, slot, subindex, roi, result):
        # only Valid is computed here; Output is wired through the stacker
        assert slot is self.Valid
        start = roi.start[0]
        stop = roi.stop[0]
        valid = self._valid_combiner.Output[start:stop, :].wait()
        # a time step is valid iff all sub-operators flag it valid
        result[:] = np.all(valid, axis=1)
    def propagateDirty(self, slot, subindex, roi):
        # Output is propagated internally, Valid should be static
        pass
class OpChain(Operator):
    """
    chains a list of feature operators (vertically)
    operators must have slots Input and Output
    """
    Input = InputSlot()
    Output = OutputSlot()
    # per-time-step flag: 1 where every chained operator reports valid output
    Valid = OutputSlot()
    @classmethod
    def build(cls, config, parent=None, graph=None, workingdir=None):
        """
        config["operators"] = <tuple of operator classes or config dicts>
        """
        to_combine = config["operators"]
        operator = cls(to_combine, parent=parent, graph=graph)
        return operator
    def __init__(self, to_combine, *args, **kwargs):
        super(OpChain, self).__init__(*args, **kwargs)
        # wire the operators in sequence: each consumes the previous
        # operator's Output (the first consumes self.Input)
        next_slot = self.Input
        operators = [build_operator(item, parent=self) for item in to_combine]
        for operator in operators:
            operator.Input.connect(next_slot)
            next_slot = operator.Output
        # stack the Valid flags of those operators that provide one
        valid_combiner = OpMultiArrayStacker(parent=self)
        valid_combiner.AxisFlag.setValue('c')
        valid_operators = [op for op in operators if hasattr(op, "Valid")]
        valid_combiner.Images.resize(len(valid_operators))
        for index, operator in enumerate(valid_operators):
            valid_combiner.Images[index].connect(operator.Valid)
        self.Output.connect(next_slot)
        self._operators = operators
        self._valid_combiner = valid_combiner
    def setupOutputs(self):
        # Valid carries one flag per time step of the chain's input
        size = self._operators[0].Input.meta.shape[0]
        self.Valid.meta.shape = (size,)
        self.Valid.meta.axistags = vigra.defaultAxistags('t')
        self.Valid.meta.dtype = np.uint8
    def execute(self, slot, subindex, roi, result):
        # only Valid is computed here; Output is wired through the chain
        assert slot is self.Valid
        start = roi.start[0]
        stop = roi.stop[0]
        valid = self._valid_combiner.Output[start:stop, :].wait()
        # a time step is valid iff all chained operators flag it valid
        result[:] = np.all(valid, axis=1)
    def propagateDirty(self, slot, subindex, roi):
        # Output is propagated internally, Valid should be static
        pass | 0.764408 | 0.368576
from collections import defaultdict
from fhir import resources as fr
from fhir.resources import reference as rf
import json
import os
import uuid
from pydicom import dcmread
from pydicom import dataset
import dicom2fhirutils
#TODO: So I feel like everytime the item is missing in DICOM, accessing that item results in runtime error.. we will need to wrap each call in separately?
def addInstance(study: fr.imagingstudy.ImagingStudy, series: fr.imagingstudy.ImagingStudySeries, ds: dataset.FileDataset, fp):
    """Append the SOP instance described by *ds* to *series* and bump the
    study/series instance counters; duplicate UIDs are reported and skipped.
    """
    # BUG FIX: selectedInstance was only assigned in the "if" branch, so the
    # first instance of a fresh series raised NameError below.
    selectedInstance = None
    # BUG FIX: a freshly built series dict has no 'instance' key yet, so use
    # .get(); the lookup also used attribute access (series.instance) on a dict.
    if series.get('instance') is not None:
        # NOTE(review): pydicom exposes the CamelCase keyword SOPInstanceUID;
        # the previous ds.instanceUID raised AttributeError.
        selectedInstance = next((i for i in series['instance'] if i.uid == ds.SOPInstanceUID), None)
    else:
        series['instance'] = []
    if selectedInstance is not None:
        print("Error: SOP Instance UID is not unique")
        print(selectedInstance.as_json())
        return
    instance = {
        'uid': ds.SOPInstanceUID,
        'sopClass': gen_instance_sopclass(ds.SOPClassUID),
        'number': ds.InstanceNumber,
        # SR documents take the concept name; images join their ImageType parts
        'title': ds.ConceptNameCodeSequence if series['modality'].code == "SR" else '\\'.join(ds.ImageType)
    }
    series['instance'].append(fr.imagingstudy.ImagingStudySeriesInstance(instance))
    # NOTE(review): when the series already exists it is a fhir model rather
    # than a dict, so this mixed dict/attribute access still needs unifying.
    study['numberOfInstances'] += 1  # really needed? can we not count after it's finished?
    series['numberOfInstances'] += 1  # really needed? can we not count after it's finished?
    return
def addSeries(study: fr.imagingstudy.ImagingStudy, ds: dataset.FileDataset, fp):
    """Add the series described by *ds* to *study*, or just append the
    instance when a series with the same UID already exists.
    """
    # TODO: Add test for studyInstanceUID ... another check to make sure it matches
    # TODO: CLEAN -> should function that works for both instances and series...
    # BUG FIX: checkSeries was only assigned in the "if" branch (NameError on
    # the first series), study['series'] raised KeyError before the key
    # existed, and study.series / study.numberOfSeries were attribute access
    # on a plain dict.
    checkSeries = None
    if study.get('series') is not None:
        # NOTE(review): pydicom exposes CamelCase keywords; ds.seriesInstanceUID
        # raised AttributeError, hence SeriesInstanceUID below.
        checkSeries = next((s for s in study['series'] if s.uid == ds.SeriesInstanceUID), None)
    else:
        study['series'] = []
    if checkSeries is not None:
        addInstance(study, checkSeries, ds, fp)
        return
    series = {
        'uid': ds.SeriesInstanceUID,
        'description': ds.SeriesDescription,
        'number': ds.SeriesNumber,
        'numberOfInstances': 0,
        'modality': gen_modality_coding(ds.Modality),
        # FIX: "SeriesDdate" was a typo for the SeriesDate keyword
        'started': gen_started_datetime(ds.SeriesDate, ds.SeriesTime),
        'bodySite': gen_coding_text_only(ds.BodyPartExamined),
        'laterality': gen_coding_text_only(ds.Laterality),
    }
    addInstance(study, series, ds, fp)
    update_study_modality_list(study, series['modality'])
    study['series'].append(fr.imagingstudy.ImagingStudySeries(series))
    study['numberOfSeries'] = study['numberOfSeries'] + 1  # really needed? can we not count after it's finished?
    return
# Shouldn't this really be a class and this function a constructor?!
def createImagingStudy(ds: dataset.FileDataset, fp) -> fr.imagingstudy.ImagingStudy:
    """Build a FHIR ImagingStudy from the first DICOM dataset of a study,
    with an inline contained Patient resource, then add the first series."""
    patientReference = rf.Reference()
    patientref = "patient.contained.inline"
    # NOTE(review): the gen_* / dcm_* / inline_patient_resource helpers are
    # called unqualified although only the dicom2fhirutils module is imported
    # — presumably they live there; verify the import style.
    study = {
        'id': str(uuid.uuid4()),
        'status': 'available',
        'description': ds.StudyDescription,
        # TODO: ds.IssuerOfAccessionNumberSequence unable to obtain
        # the object and identify correct logic for issuer (SQ)
        'identifier': [gen_accession_identifier(ds.AccessionNumber), gen_studyinstanceuid_identifier(ds.StudyInstanceUID)],
        'contained': [inline_patient_resource(
            patientref,
            ds.PatientID,
            "",
            ds.PatientName,
            ds.PatientSex,
            ds.PatientBirthDate)],
        'reference': '#' + patientref,
        'subject': patientReference,
        # NOTE(review): dcmDir is not defined in this scope (it is a local of
        # process_dicom_2_fhir), so this line raises NameError at runtime —
        # it should probably be passed in as a parameter.
        'endpoint': [rf.Reference(reference="file:///" +dcmDir)],
        'procedureCode': gen_procedurecode_array(dcm_coded_concept(ds.ProcedureCodeSequence)),
        'studyStarted': gen_started_datetime(ds.StudyDate, ds.StudyTime),
        'reasonCode': (gen_reason(
            dcm_coded_concept(ds.ReasonForRequestedProcedureCodeSequence),
            ds.ReasonForTheRequestedProcedure)
        ),
        'numberOfSeries': 0,
        'numberOfInstances': 0,
    }
    addSeries(study, ds, fp)
    return fr.imagingstudy.ImagingStudy(**study)
def process_dicom_2_fhir(dcmDir: str) -> fr.imagingstudy.ImagingStudy:
    """Walk *dcmDir*, read every DICOM file (metadata only) and fold them
    into a single FHIR ImagingStudy.

    Raises Exception if files from more than one study are encountered.
    """
    files = []
    for root, _dirs, names in os.walk(dcmDir):
        for name in names:
            if name != '.DS_Store':  # skip macOS folder metadata
                files.append(os.path.join(root, name))
    # BUG FIX: studyInstanceUID was never initialised, so the first
    # "if studyInstanceUID is None" check raised NameError.
    studyInstanceUID = None
    imagingStudy = None
    for fp in files:
        # defer reading PixelData (0x7FE00010) — only headers are needed
        with dcmread(fp, None, [0x7FE00010], force=True) as ds:
            if studyInstanceUID is None:
                studyInstanceUID = ds.StudyInstanceUID
            if studyInstanceUID != ds.StudyInstanceUID:
                raise Exception("Incorrect DCM path, more than one study detected")
            if imagingStudy is None:
                imagingStudy = createImagingStudy(ds, fp)
            else:
                addSeries(imagingStudy, ds, fp)
    return imagingStudy
from fhir import resources as fr
from fhir.resources import reference as rf
import json
import os
import uuid
from pydicom import dcmread
from pydicom import dataset
import dicom2fhirutils
#TODO: So I feel like everytime the item is missing in DICOM, accessing that item results in runtime error.. we will need to wrap each call in separately?
def addInstance(study: fr.imagingstudy.ImagingStudy, series: fr.imagingstudy.ImagingStudySeries, ds: dataset.FileDataset, fp):
    """Append the SOP instance described by *ds* to *series* and bump the
    study/series instance counters; duplicate UIDs are reported and skipped.
    """
    # BUG FIX: selectedInstance was only assigned in the "if" branch, so the
    # first instance of a fresh series raised NameError below.
    selectedInstance = None
    # BUG FIX: a freshly built series dict has no 'instance' key yet, so use
    # .get(); the lookup also used attribute access (series.instance) on a dict.
    if series.get('instance') is not None:
        # NOTE(review): pydicom exposes the CamelCase keyword SOPInstanceUID;
        # the previous ds.instanceUID raised AttributeError.
        selectedInstance = next((i for i in series['instance'] if i.uid == ds.SOPInstanceUID), None)
    else:
        series['instance'] = []
    if selectedInstance is not None:
        print("Error: SOP Instance UID is not unique")
        print(selectedInstance.as_json())
        return
    instance = {
        'uid': ds.SOPInstanceUID,
        'sopClass': gen_instance_sopclass(ds.SOPClassUID),
        'number': ds.InstanceNumber,
        # SR documents take the concept name; images join their ImageType parts
        'title': ds.ConceptNameCodeSequence if series['modality'].code == "SR" else '\\'.join(ds.ImageType)
    }
    series['instance'].append(fr.imagingstudy.ImagingStudySeriesInstance(instance))
    # NOTE(review): when the series already exists it is a fhir model rather
    # than a dict, so this mixed dict/attribute access still needs unifying.
    study['numberOfInstances'] += 1  # really needed? can we not count after it's finished?
    series['numberOfInstances'] += 1  # really needed? can we not count after it's finished?
    return
def addSeries(study: fr.imagingstudy.ImagingStudy, ds: dataset.FileDataset, fp):
    """Add the series described by *ds* to *study*, or just append the
    instance when a series with the same UID already exists.
    """
    # TODO: Add test for studyInstanceUID ... another check to make sure it matches
    # TODO: CLEAN -> should function that works for both instances and series...
    # BUG FIX: checkSeries was only assigned in the "if" branch (NameError on
    # the first series), study['series'] raised KeyError before the key
    # existed, and study.series / study.numberOfSeries were attribute access
    # on a plain dict.
    checkSeries = None
    if study.get('series') is not None:
        # NOTE(review): pydicom exposes CamelCase keywords; ds.seriesInstanceUID
        # raised AttributeError, hence SeriesInstanceUID below.
        checkSeries = next((s for s in study['series'] if s.uid == ds.SeriesInstanceUID), None)
    else:
        study['series'] = []
    if checkSeries is not None:
        addInstance(study, checkSeries, ds, fp)
        return
    series = {
        'uid': ds.SeriesInstanceUID,
        'description': ds.SeriesDescription,
        'number': ds.SeriesNumber,
        'numberOfInstances': 0,
        'modality': gen_modality_coding(ds.Modality),
        # FIX: "SeriesDdate" was a typo for the SeriesDate keyword
        'started': gen_started_datetime(ds.SeriesDate, ds.SeriesTime),
        'bodySite': gen_coding_text_only(ds.BodyPartExamined),
        'laterality': gen_coding_text_only(ds.Laterality),
    }
    addInstance(study, series, ds, fp)
    update_study_modality_list(study, series['modality'])
    study['series'].append(fr.imagingstudy.ImagingStudySeries(series))
    study['numberOfSeries'] = study['numberOfSeries'] + 1  # really needed? can we not count after it's finished?
    return
# Shouldn't this really be a class and this function a constructor?!
def createImagingStudy(ds: dataset.FileDataset, fp) -> fr.imagingstudy.ImagingStudy:
    """Build a new FHIR ImagingStudy from the first DICOM object of a study.

    ds -- pydicom FileDataset supplying the study-level attributes.
    fp -- file path of the DICOM object, forwarded to addSeries().
    Returns the populated fr.imagingstudy.ImagingStudy resource.
    """
    # NOTE(review): this Reference is used as 'subject' below but is never
    # populated — confirm whether it should point at the contained patient.
    patientReference = rf.Reference()
    patientref = "patient.contained.inline"
    study = {
        'id': str(uuid.uuid4()),
        'status': 'available',
        'description': ds.StudyDescription,
        # TODO: ds.IssuerOfAccessionNumberSequence unable to obtain
        # the object and identify correct logic for issuer (SQ)
        'identifier': [gen_accession_identifier(ds.AccessionNumber), gen_studyinstanceuid_identifier(ds.StudyInstanceUID)],
        # Patient demographics are embedded as a contained resource and
        # referenced via the local '#<id>' fragment below.
        'contained': [inline_patient_resource(
            patientref,
            ds.PatientID,
            "",
            ds.PatientName,
            ds.PatientSex,
            ds.PatientBirthDate)],
        'reference': '#' + patientref,
        'subject': patientReference,
        # NOTE(review): dcmDir is a free variable here (it is only defined as a
        # parameter of process_dicom_2_fhir) — this raises NameError unless a
        # module-level dcmDir exists; confirm and pass it in explicitly.
        'endpoint': [rf.Reference(reference="file:///" +dcmDir)],
        'procedureCode': gen_procedurecode_array(dcm_coded_concept(ds.ProcedureCodeSequence)),
        # NOTE(review): the FHIR R4 ImagingStudy field is named 'started', not
        # 'studyStarted' — verify against the fr.imagingstudy model in use.
        'studyStarted': gen_started_datetime(ds.StudyDate, ds.StudyTime),
        'reasonCode': (gen_reason(
            dcm_coded_concept(ds.ReasonForRequestedProcedureCodeSequence),
            ds.ReasonForTheRequestedProcedure)
        ),
        'numberOfSeries': 0,
        'numberOfInstances': 0,
    }
    # Register the first series/instance before materializing the resource.
    addSeries(study, ds, fp)
    return fr.imagingstudy.ImagingStudy(**study)
def process_dicom_2_fhir(dcmDir: str) -> fr.imagingstudy.ImagingStudy:
    """Walk *dcmDir*, read every DICOM file and fold them into one ImagingStudy.

    dcmDir -- root directory containing exactly one study's DICOM files.
    Returns the assembled ImagingStudy, or None if the directory holds no files.
    Raises Exception if files from more than one study are found.
    """
    files = []
    for r, d, f in os.walk(dcmDir):
        for file in f:
            if file != '.DS_Store':  # skip macOS metadata files
                files.append(os.path.join(r, file))
    imagingStudy = None
    studyInstanceUID = None  # fix: was read below before ever being assigned
    for fp in files:
        # NOTE(review): the third positional argument is presumably meant to
        # stop reading before PixelData (7FE0,0010) — confirm against the
        # installed pydicom dcmread() signature.
        with dcmread(fp, None, [0x7FE00010], force=True) as ds:
            if studyInstanceUID is None:
                studyInstanceUID = ds.StudyInstanceUID
            if studyInstanceUID != ds.StudyInstanceUID:
                # fix: dropped the unreachable `return None` after this raise
                raise Exception("Incorrect DCM path, more than one study detected")
            if imagingStudy is None:
                imagingStudy = createImagingStudy(ds, fp)
            else:
                addSeries(imagingStudy, ds, fp)
    return imagingStudy
from gateway.operations.operations import Operations
from unittest import TestCase
from unittest.mock import patch
class TestOperations(TestCase):
    """Checks that every Operations entry point returns its matching builder
    and is callable with no arguments."""

    OP = None  # populated per-test in setUp

    def setUp(self):
        self.OP = Operations({}, {}, {})

    def test_call_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import SmsBuilder
        self.assertIsInstance(new_op.sms(), SmsBuilder)
        with patch.object(new_op, 'sms') as mock:
            new_op.sms()
            mock.assert_called_once_with()

    def test_call_dms_hold(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsHoldBuilder
        self.assertIsInstance(new_op.dms_hold(), DmsHoldBuilder)
        with patch.object(new_op, 'dms_hold') as mock:
            new_op.dms_hold()
            mock.assert_called_once_with()

    def test_call_dms_charge(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsChargeBuilder
        self.assertIsInstance(new_op.dms_charge(), DmsChargeBuilder)
        with patch.object(new_op, 'dms_charge') as mock:
            new_op.dms_charge()
            mock.assert_called_once_with()

    def test_call_dms_cancel(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsCancelBuilder
        self.assertIsInstance(new_op.dms_cancel(), DmsCancelBuilder)
        with patch.object(new_op, 'dms_cancel') as mock:
            new_op.dms_cancel()
            mock.assert_called_once_with()

    def test_call_moto_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import MotoSmsBuilder
        self.assertIsInstance(new_op.moto_sms(), MotoSmsBuilder)
        with patch.object(new_op, 'moto_sms') as mock:
            new_op.moto_sms()
            mock.assert_called_once_with()

    def test_call_moto_dms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import MotoDmsBuilder
        self.assertIsInstance(new_op.moto_dms(), MotoDmsBuilder)
        with patch.object(new_op, 'moto_dms') as mock:
            new_op.moto_dms()
            mock.assert_called_once_with()

    def test_call_credit(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import CreditBuilder
        self.assertIsInstance(new_op.credit(), CreditBuilder)
        with patch.object(new_op, 'credit') as mock:
            new_op.credit()
            mock.assert_called_once_with()

    def test_call_p2p(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import P2PBuilder
        self.assertIsInstance(new_op.p2p(), P2PBuilder)
        with patch.object(new_op, 'p2p') as mock:
            new_op.p2p()
            mock.assert_called_once_with()

    def test_call_b2p(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import B2PBuilder
        self.assertIsInstance(new_op.b2p(), B2PBuilder)
        with patch.object(new_op, 'b2p') as mock:
            new_op.b2p()
            mock.assert_called_once_with()

    def test_call_init_recurrent_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import SmsBuilder
        self.assertIsInstance(new_op.init_recurrent_sms(), SmsBuilder)
        with patch.object(new_op, 'init_recurrent_sms') as mock:
            new_op.init_recurrent_sms()
            mock.assert_called_once_with()

    def test_call_recurrent_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import RecurrentSmsBuilder
        self.assertIsInstance(new_op.recurrent_sms(), RecurrentSmsBuilder)
        with patch.object(new_op, 'recurrent_sms') as mock:
            new_op.recurrent_sms()
            mock.assert_called_once_with()

    def test_call_init_recurrent_dms(self):
        """Will succeed"""
        # Renamed from a second `test_call_dms_hold`, which shadowed the real
        # dms_hold test above and hid this init_recurrent_dms check entirely.
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsHoldBuilder
        self.assertIsInstance(new_op.init_recurrent_dms(), DmsHoldBuilder)
        with patch.object(new_op, 'init_recurrent_dms') as mock:
            new_op.init_recurrent_dms()
            mock.assert_called_once_with()

    def test_call_recurrent_dms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import RecurrentDmsBuilder
        self.assertIsInstance(new_op.recurrent_dms(), RecurrentDmsBuilder)
        with patch.object(new_op, 'recurrent_dms') as mock:
            new_op.recurrent_dms()
            mock.assert_called_once_with()

    def test_call_refund(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import RefundBuilder
        self.assertIsInstance(new_op.refund(), RefundBuilder)
        with patch.object(new_op, 'refund') as mock:
            new_op.refund()
            mock.assert_called_once_with()

    def test_call_reversal(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import ReversalBuilder
        self.assertIsInstance(new_op.reversal(), ReversalBuilder)
        with patch.object(new_op, 'reversal') as mock:
            new_op.reversal()
            mock.assert_called_once_with()

    def test_call_transaction_status(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_status(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_status') as mock:
            new_op.transaction_status()
            mock.assert_called_once_with()

    def test_call_transaction_result(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_result(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_result') as mock:
            new_op.transaction_result()
            mock.assert_called_once_with()

    def test_call_transaction_history(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_history(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_history') as mock:
            new_op.transaction_history()
            mock.assert_called_once_with()

    def test_call_transaction_recurrent_history(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_recurrent_history(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_recurrent_history') as mock:
            new_op.transaction_recurrent_history()
            mock.assert_called_once_with()

    def test_call_transaction_refunds_history(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_refunds_history(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_refunds_history') as mock:
            new_op.transaction_refunds_history()
            mock.assert_called_once_with()

    def test_call_verify_3d_enrollment(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import Verify3dBuilder
        self.assertIsInstance(new_op.verify_3d_enrollment(), Verify3dBuilder)
        with patch.object(new_op, 'verify_3d_enrollment') as mock:
            new_op.verify_3d_enrollment()
            mock.assert_called_once_with()

    def test_call_verify_card(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import VerifyCardBuilder
        self.assertIsInstance(new_op.verify_card(), VerifyCardBuilder)
        with patch.object(new_op, 'verify_card') as mock:
            new_op.verify_card()
            mock.assert_called_once_with()

    def test_call_create_token(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import CreateTokenBuilder
        self.assertIsInstance(new_op.create_token(), CreateTokenBuilder)
        with patch.object(new_op, 'create_token') as mock:
            new_op.create_token()
            mock.assert_called_once_with()
from gateway.operations.operations import Operations
from unittest import TestCase
from unittest.mock import patch
class TestOperations(TestCase):
    """Checks that every Operations entry point returns its matching builder
    and is callable with no arguments."""

    OP = None  # populated per-test in setUp

    def setUp(self):
        self.OP = Operations({}, {}, {})

    def test_call_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import SmsBuilder
        self.assertIsInstance(new_op.sms(), SmsBuilder)
        with patch.object(new_op, 'sms') as mock:
            new_op.sms()
            mock.assert_called_once_with()

    def test_call_dms_hold(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsHoldBuilder
        self.assertIsInstance(new_op.dms_hold(), DmsHoldBuilder)
        with patch.object(new_op, 'dms_hold') as mock:
            new_op.dms_hold()
            mock.assert_called_once_with()

    def test_call_dms_charge(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsChargeBuilder
        self.assertIsInstance(new_op.dms_charge(), DmsChargeBuilder)
        with patch.object(new_op, 'dms_charge') as mock:
            new_op.dms_charge()
            mock.assert_called_once_with()

    def test_call_dms_cancel(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsCancelBuilder
        self.assertIsInstance(new_op.dms_cancel(), DmsCancelBuilder)
        with patch.object(new_op, 'dms_cancel') as mock:
            new_op.dms_cancel()
            mock.assert_called_once_with()

    def test_call_moto_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import MotoSmsBuilder
        self.assertIsInstance(new_op.moto_sms(), MotoSmsBuilder)
        with patch.object(new_op, 'moto_sms') as mock:
            new_op.moto_sms()
            mock.assert_called_once_with()

    def test_call_moto_dms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import MotoDmsBuilder
        self.assertIsInstance(new_op.moto_dms(), MotoDmsBuilder)
        with patch.object(new_op, 'moto_dms') as mock:
            new_op.moto_dms()
            mock.assert_called_once_with()

    def test_call_credit(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import CreditBuilder
        self.assertIsInstance(new_op.credit(), CreditBuilder)
        with patch.object(new_op, 'credit') as mock:
            new_op.credit()
            mock.assert_called_once_with()

    def test_call_p2p(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import P2PBuilder
        self.assertIsInstance(new_op.p2p(), P2PBuilder)
        with patch.object(new_op, 'p2p') as mock:
            new_op.p2p()
            mock.assert_called_once_with()

    def test_call_b2p(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import B2PBuilder
        self.assertIsInstance(new_op.b2p(), B2PBuilder)
        with patch.object(new_op, 'b2p') as mock:
            new_op.b2p()
            mock.assert_called_once_with()

    def test_call_init_recurrent_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import SmsBuilder
        self.assertIsInstance(new_op.init_recurrent_sms(), SmsBuilder)
        with patch.object(new_op, 'init_recurrent_sms') as mock:
            new_op.init_recurrent_sms()
            mock.assert_called_once_with()

    def test_call_recurrent_sms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import RecurrentSmsBuilder
        self.assertIsInstance(new_op.recurrent_sms(), RecurrentSmsBuilder)
        with patch.object(new_op, 'recurrent_sms') as mock:
            new_op.recurrent_sms()
            mock.assert_called_once_with()

    def test_call_init_recurrent_dms(self):
        """Will succeed"""
        # Renamed from a second `test_call_dms_hold`, which shadowed the real
        # dms_hold test above and hid this init_recurrent_dms check entirely.
        new_op = self.OP
        from gateway.builders.transaction_builder import DmsHoldBuilder
        self.assertIsInstance(new_op.init_recurrent_dms(), DmsHoldBuilder)
        with patch.object(new_op, 'init_recurrent_dms') as mock:
            new_op.init_recurrent_dms()
            mock.assert_called_once_with()

    def test_call_recurrent_dms(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import RecurrentDmsBuilder
        self.assertIsInstance(new_op.recurrent_dms(), RecurrentDmsBuilder)
        with patch.object(new_op, 'recurrent_dms') as mock:
            new_op.recurrent_dms()
            mock.assert_called_once_with()

    def test_call_refund(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import RefundBuilder
        self.assertIsInstance(new_op.refund(), RefundBuilder)
        with patch.object(new_op, 'refund') as mock:
            new_op.refund()
            mock.assert_called_once_with()

    def test_call_reversal(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import ReversalBuilder
        self.assertIsInstance(new_op.reversal(), ReversalBuilder)
        with patch.object(new_op, 'reversal') as mock:
            new_op.reversal()
            mock.assert_called_once_with()

    def test_call_transaction_status(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_status(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_status') as mock:
            new_op.transaction_status()
            mock.assert_called_once_with()

    def test_call_transaction_result(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_result(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_result') as mock:
            new_op.transaction_result()
            mock.assert_called_once_with()

    def test_call_transaction_history(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_history(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_history') as mock:
            new_op.transaction_history()
            mock.assert_called_once_with()

    def test_call_transaction_recurrent_history(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_recurrent_history(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_recurrent_history') as mock:
            new_op.transaction_recurrent_history()
            mock.assert_called_once_with()

    def test_call_transaction_refunds_history(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import TransactionStatusBuilder
        self.assertIsInstance(new_op.transaction_refunds_history(), TransactionStatusBuilder)
        with patch.object(new_op, 'transaction_refunds_history') as mock:
            new_op.transaction_refunds_history()
            mock.assert_called_once_with()

    def test_call_verify_3d_enrollment(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import Verify3dBuilder
        self.assertIsInstance(new_op.verify_3d_enrollment(), Verify3dBuilder)
        with patch.object(new_op, 'verify_3d_enrollment') as mock:
            new_op.verify_3d_enrollment()
            mock.assert_called_once_with()

    def test_call_verify_card(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import VerifyCardBuilder
        self.assertIsInstance(new_op.verify_card(), VerifyCardBuilder)
        with patch.object(new_op, 'verify_card') as mock:
            new_op.verify_card()
            mock.assert_called_once_with()

    def test_call_create_token(self):
        """Will succeed"""
        new_op = self.OP
        from gateway.builders.transaction_builder import CreateTokenBuilder
        self.assertIsInstance(new_op.create_token(), CreateTokenBuilder)
        with patch.object(new_op, 'create_token') as mock:
            new_op.create_token()
            mock.assert_called_once_with()
import os
import sys
import cv2
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
import time
def make_CM(path_result, path_ref):
    """Build per-slice confusion matrices for masks combined from PT and CT.

    Compares each reference mask slice against the intersection of the CT ROI
    mask, the PT ROI mask and the feature image, and writes TP/FP/FN/TN per
    slice to 'CM_<result_dir_name>_<HHMM>.csv' inside path_result.
    CAUTION: index.csv file should be located in root directory.

    Input
    ______
    path_result : path of root directory for nodule detection image
    path_ref : path of root directory for reference image

    Output
    ______
    None; writes the CSV file as a side effect.
    """
    # Read per-patient bookkeeping from csv
    index = pd.read_csv(f'{path_result}/index.csv')
    patient_name = index['patient_name']
    feature_index = index['feature_index']
    offset = index['offset']
    # Fixed crop window applied to every reference mask (loop invariant).
    crop = np.array([[230, 380], [150, 370]])
    data = []
    for i, addr_maskfd in enumerate(patient_name):
        folder_mask = os.path.join(path_ref, addr_maskfd)
        # Sorted for a deterministic row order; os.listdir order is arbitrary.
        # NOTE(review): [:-1] drops the last directory entry — confirm this is
        # intentional (e.g. a non-mask file) rather than an off-by-one.
        list_mask = sorted(os.listdir(folder_mask))[:-1]
        folder_result = os.path.join(path_result, addr_maskfd)
        for file_mask in list_mask:
            addr_mask = os.path.join(folder_mask, file_mask)
            mask = cv2.imread(addr_mask, cv2.IMREAD_GRAYSCALE)
            mask_crop = mask[crop[0, 0]:crop[0, 1], crop[1, 0]:crop[1, 1]]
            mask_crop[mask_crop > 0] = 255  # binarize the reference mask
            # Slice number is encoded in the mask filename, shifted by the
            # per-patient offset from index.csv.
            file_num = str(int(file_mask[5:8]) - offset[i])
            file_roi = f'Mask/Mask_I{file_num}0.png'
            file_roi_PT = f'Mask/MaskPT_I{file_num}0.png'
            file_feature = f'Features/Features_{feature_index[i]}_I{file_num}0.png'
            roi = cv2.imread(os.path.join(folder_result, file_roi), cv2.IMREAD_GRAYSCALE)
            roi_PT = cv2.imread(os.path.join(folder_result, file_roi_PT), cv2.IMREAD_GRAYSCALE)
            feature = cv2.imread(os.path.join(folder_result, file_feature), cv2.IMREAD_GRAYSCALE)
            # Detection = CT ROI AND feature AND PT ROI.
            result = cv2.bitwise_and(cv2.bitwise_and(roi, feature), roi_PT)
            # labels=[0, 1] keeps the matrix 2x2 even when a slice contains a
            # single class, so the 4-way unpack cannot fail.
            tn, fp, fn, tp = confusion_matrix(mask_crop.flatten() / 255,
                                              result.flatten() / 255,
                                              labels=[0, 1]).ravel()
            data.append([addr_maskfd, file_num, tp, fp, fn, tn])
    df = pd.DataFrame(data=data,
                      columns=['PatientName', 'SliceNumber', 'TP', 'FP', 'FN', 'TN'])
    # Save result to csv file, timestamped to avoid clobbering earlier runs.
    hm = time.strftime('%H%M')
    result_date = os.path.split(path_result)[-1]
    df.to_csv(f'{path_result}/CM_{result_date}_{hm}.csv')
def make_CM_single(path_result, path_ref):
    """Build per-slice confusion matrices for masks generated from CT only.

    Compares each reference mask slice against the intersection of the CT ROI
    mask and the feature image, and writes TP/FP/FN/TN per slice to
    'CM_<result_dir_name>_<HHMM>.csv' inside path_result.
    CAUTION: index.csv file should be located in root directory.

    Input
    ______
    path_result : path of root directory for nodule detection image
    path_ref : path of root directory for reference image

    Output
    ______
    None; writes the CSV file as a side effect.
    """
    # Read per-patient bookkeeping from csv
    index = pd.read_csv(f'{path_result}/index.csv')
    patient_name = index['patient_name']
    feature_index = index['feature_index']
    offset = index['offset']
    # Fixed crop window applied to every reference mask (loop invariant).
    crop = np.array([[230, 380], [150, 370]])
    data = []
    for i, addr_maskfd in enumerate(patient_name):
        folder_mask = os.path.join(path_ref, addr_maskfd)
        # Sorted for a deterministic row order; os.listdir order is arbitrary.
        # NOTE(review): [:-1] drops the last directory entry — confirm intent.
        list_mask = sorted(os.listdir(folder_mask))[:-1]
        folder_result = os.path.join(path_result, addr_maskfd)
        for file_mask in list_mask:
            addr_mask = os.path.join(folder_mask, file_mask)
            mask = cv2.imread(addr_mask, cv2.IMREAD_GRAYSCALE)
            mask_crop = mask[crop[0, 0]:crop[0, 1], crop[1, 0]:crop[1, 1]]
            mask_crop[mask_crop > 0] = 255  # binarize the reference mask
            # Slice number is encoded in the mask filename, shifted by the
            # per-patient offset from index.csv.
            file_num = str(int(file_mask[5:8]) - offset[i])
            file_roi = f'Mask/Mask_I{file_num}0.png'
            file_feature = f'Features/Features_{feature_index[i]}_I{file_num}0.png'
            roi = cv2.imread(os.path.join(folder_result, file_roi), cv2.IMREAD_GRAYSCALE)
            feature = cv2.imread(os.path.join(folder_result, file_feature), cv2.IMREAD_GRAYSCALE)
            # Detection = CT ROI AND feature (no PT mask in the single model).
            result = cv2.bitwise_and(roi, feature)
            # labels=[0, 1] keeps the matrix 2x2 even when a slice contains a
            # single class, so the 4-way unpack cannot fail.
            tn, fp, fn, tp = confusion_matrix(mask_crop.flatten() / 255,
                                              result.flatten() / 255,
                                              labels=[0, 1]).ravel()
            data.append([addr_maskfd, file_num, tp, fp, fn, tn])
    df = pd.DataFrame(data=data,
                      columns=['PatientName', 'SliceNumber', 'TP', 'FP', 'FN', 'TN'])
    # Save result to csv file, timestamped to avoid clobbering earlier runs.
    hm = time.strftime('%H%M')
    result_date = os.path.split(path_result)[-1]
    df.to_csv(f'{path_result}/CM_{result_date}_{hm}.csv')
def make_CM_nomask(path_result, path_ref):
    """Build per-slice confusion matrices without applying any ROI mask.

    Compares each reference mask slice directly against the feature image and
    writes TP/FP/FN/TN per slice to 'CM_<result_dir_name>_<HHMM>.csv' inside
    path_result.
    CAUTION: index.csv file should be located in root directory.

    Input
    ______
    path_result : path of root directory for nodule detection image
    path_ref : path of root directory for reference image

    Output
    ______
    None; writes the CSV file as a side effect.
    """
    # Read per-patient bookkeeping from csv
    index = pd.read_csv(f'{path_result}/index.csv')
    patient_name = index['patient_name']
    feature_index = index['feature_index']
    offset = index['offset']
    # Fixed crop window applied to every reference mask (loop invariant).
    crop = np.array([[230, 380], [150, 370]])
    data = []
    for i, addr_maskfd in enumerate(patient_name):
        folder_mask = os.path.join(path_ref, addr_maskfd)
        # Sorted for a deterministic row order; os.listdir order is arbitrary.
        # NOTE(review): [:-1] drops the last directory entry — confirm intent.
        list_mask = sorted(os.listdir(folder_mask))[:-1]
        folder_result = os.path.join(path_result, addr_maskfd)
        for file_mask in list_mask:
            addr_mask = os.path.join(folder_mask, file_mask)
            mask = cv2.imread(addr_mask, cv2.IMREAD_GRAYSCALE)
            mask_crop = mask[crop[0, 0]:crop[0, 1], crop[1, 0]:crop[1, 1]]
            mask_crop[mask_crop > 0] = 255  # binarize the reference mask
            # Slice number is encoded in the mask filename, shifted by the
            # per-patient offset from index.csv.
            file_num = str(int(file_mask[5:8]) - offset[i])
            file_feature = f'Features/Features_{feature_index[i]}_I{file_num}0.png'
            feature = cv2.imread(os.path.join(folder_result, file_feature), cv2.IMREAD_GRAYSCALE)
            # labels=[0, 1] keeps the matrix 2x2 even when a slice contains a
            # single class, so the 4-way unpack cannot fail.
            tn, fp, fn, tp = confusion_matrix(mask_crop.flatten() / 255,
                                              feature.flatten() / 255,
                                              labels=[0, 1]).ravel()
            data.append([addr_maskfd, file_num, tp, fp, fn, tn])
    df = pd.DataFrame(data=data,
                      columns=['PatientName', 'SliceNumber', 'TP', 'FP', 'FN', 'TN'])
    # Save result to csv file, timestamped to avoid clobbering earlier runs.
    hm = time.strftime('%H%M')
    result_date = os.path.split(path_result)[-1]
    df.to_csv(f'{path_result}/CM_{result_date}_{hm}.csv')
if __name__ == '__main__':
    # CLI entry point: python Calculate.py <path_result> <path_ref>
    if len(sys.argv) < 3:
        sys.exit('usage: Calculate.py <path_result> <path_ref>')
    make_CM(sys.argv[1], sys.argv[2])
import sys
import cv2
import numpy as np
import pandas as pd
from sklearn.metrics import confusion_matrix
import time
def make_CM(path_result, path_ref):
    """Build per-slice confusion matrices for masks combined from PT and CT.

    Compares each reference mask slice against the intersection of the CT ROI
    mask, the PT ROI mask and the feature image, and writes TP/FP/FN/TN per
    slice to 'CM_<result_dir_name>_<HHMM>.csv' inside path_result.
    CAUTION: index.csv file should be located in root directory.

    Input
    ______
    path_result : path of root directory for nodule detection image
    path_ref : path of root directory for reference image

    Output
    ______
    None; writes the CSV file as a side effect.
    """
    # Read per-patient bookkeeping from csv
    index = pd.read_csv(f'{path_result}/index.csv')
    patient_name = index['patient_name']
    feature_index = index['feature_index']
    offset = index['offset']
    # Fixed crop window applied to every reference mask (loop invariant).
    crop = np.array([[230, 380], [150, 370]])
    data = []
    for i, addr_maskfd in enumerate(patient_name):
        folder_mask = os.path.join(path_ref, addr_maskfd)
        # Sorted for a deterministic row order; os.listdir order is arbitrary.
        # NOTE(review): [:-1] drops the last directory entry — confirm this is
        # intentional (e.g. a non-mask file) rather than an off-by-one.
        list_mask = sorted(os.listdir(folder_mask))[:-1]
        folder_result = os.path.join(path_result, addr_maskfd)
        for file_mask in list_mask:
            addr_mask = os.path.join(folder_mask, file_mask)
            mask = cv2.imread(addr_mask, cv2.IMREAD_GRAYSCALE)
            mask_crop = mask[crop[0, 0]:crop[0, 1], crop[1, 0]:crop[1, 1]]
            mask_crop[mask_crop > 0] = 255  # binarize the reference mask
            # Slice number is encoded in the mask filename, shifted by the
            # per-patient offset from index.csv.
            file_num = str(int(file_mask[5:8]) - offset[i])
            file_roi = f'Mask/Mask_I{file_num}0.png'
            file_roi_PT = f'Mask/MaskPT_I{file_num}0.png'
            file_feature = f'Features/Features_{feature_index[i]}_I{file_num}0.png'
            roi = cv2.imread(os.path.join(folder_result, file_roi), cv2.IMREAD_GRAYSCALE)
            roi_PT = cv2.imread(os.path.join(folder_result, file_roi_PT), cv2.IMREAD_GRAYSCALE)
            feature = cv2.imread(os.path.join(folder_result, file_feature), cv2.IMREAD_GRAYSCALE)
            # Detection = CT ROI AND feature AND PT ROI.
            result = cv2.bitwise_and(cv2.bitwise_and(roi, feature), roi_PT)
            # labels=[0, 1] keeps the matrix 2x2 even when a slice contains a
            # single class, so the 4-way unpack cannot fail.
            tn, fp, fn, tp = confusion_matrix(mask_crop.flatten() / 255,
                                              result.flatten() / 255,
                                              labels=[0, 1]).ravel()
            data.append([addr_maskfd, file_num, tp, fp, fn, tn])
    df = pd.DataFrame(data=data,
                      columns=['PatientName', 'SliceNumber', 'TP', 'FP', 'FN', 'TN'])
    # Save result to csv file, timestamped to avoid clobbering earlier runs.
    hm = time.strftime('%H%M')
    result_date = os.path.split(path_result)[-1]
    df.to_csv(f'{path_result}/CM_{result_date}_{hm}.csv')
def make_CM_single(path_result, path_ref):
    """Build per-slice confusion matrices for masks generated from CT only.

    Compares each reference mask slice against the intersection of the CT ROI
    mask and the feature image, and writes TP/FP/FN/TN per slice to
    'CM_<result_dir_name>_<HHMM>.csv' inside path_result.
    CAUTION: index.csv file should be located in root directory.

    Input
    ______
    path_result : path of root directory for nodule detection image
    path_ref : path of root directory for reference image

    Output
    ______
    None; writes the CSV file as a side effect.
    """
    # Read per-patient bookkeeping from csv
    index = pd.read_csv(f'{path_result}/index.csv')
    patient_name = index['patient_name']
    feature_index = index['feature_index']
    offset = index['offset']
    # Fixed crop window applied to every reference mask (loop invariant).
    crop = np.array([[230, 380], [150, 370]])
    data = []
    for i, addr_maskfd in enumerate(patient_name):
        folder_mask = os.path.join(path_ref, addr_maskfd)
        # Sorted for a deterministic row order; os.listdir order is arbitrary.
        # NOTE(review): [:-1] drops the last directory entry — confirm intent.
        list_mask = sorted(os.listdir(folder_mask))[:-1]
        folder_result = os.path.join(path_result, addr_maskfd)
        for file_mask in list_mask:
            addr_mask = os.path.join(folder_mask, file_mask)
            mask = cv2.imread(addr_mask, cv2.IMREAD_GRAYSCALE)
            mask_crop = mask[crop[0, 0]:crop[0, 1], crop[1, 0]:crop[1, 1]]
            mask_crop[mask_crop > 0] = 255  # binarize the reference mask
            # Slice number is encoded in the mask filename, shifted by the
            # per-patient offset from index.csv.
            file_num = str(int(file_mask[5:8]) - offset[i])
            file_roi = f'Mask/Mask_I{file_num}0.png'
            file_feature = f'Features/Features_{feature_index[i]}_I{file_num}0.png'
            roi = cv2.imread(os.path.join(folder_result, file_roi), cv2.IMREAD_GRAYSCALE)
            feature = cv2.imread(os.path.join(folder_result, file_feature), cv2.IMREAD_GRAYSCALE)
            # Detection = CT ROI AND feature (no PT mask in the single model).
            result = cv2.bitwise_and(roi, feature)
            # labels=[0, 1] keeps the matrix 2x2 even when a slice contains a
            # single class, so the 4-way unpack cannot fail.
            tn, fp, fn, tp = confusion_matrix(mask_crop.flatten() / 255,
                                              result.flatten() / 255,
                                              labels=[0, 1]).ravel()
            data.append([addr_maskfd, file_num, tp, fp, fn, tn])
    df = pd.DataFrame(data=data,
                      columns=['PatientName', 'SliceNumber', 'TP', 'FP', 'FN', 'TN'])
    # Save result to csv file, timestamped to avoid clobbering earlier runs.
    hm = time.strftime('%H%M')
    result_date = os.path.split(path_result)[-1]
    df.to_csv(f'{path_result}/CM_{result_date}_{hm}.csv')
def make_CM_nomask(path_result, path_ref):
    """Build per-slice confusion matrices without applying any ROI mask.

    Compares each reference mask slice directly against the feature image and
    writes TP/FP/FN/TN per slice to 'CM_<result_dir_name>_<HHMM>.csv' inside
    path_result.
    CAUTION: index.csv file should be located in root directory.

    Input
    ______
    path_result : path of root directory for nodule detection image
    path_ref : path of root directory for reference image

    Output
    ______
    None; writes the CSV file as a side effect.
    """
    # Read per-patient bookkeeping from csv
    index = pd.read_csv(f'{path_result}/index.csv')
    patient_name = index['patient_name']
    feature_index = index['feature_index']
    offset = index['offset']
    # Fixed crop window applied to every reference mask (loop invariant).
    crop = np.array([[230, 380], [150, 370]])
    data = []
    for i, addr_maskfd in enumerate(patient_name):
        folder_mask = os.path.join(path_ref, addr_maskfd)
        # Sorted for a deterministic row order; os.listdir order is arbitrary.
        # NOTE(review): [:-1] drops the last directory entry — confirm intent.
        list_mask = sorted(os.listdir(folder_mask))[:-1]
        folder_result = os.path.join(path_result, addr_maskfd)
        for file_mask in list_mask:
            addr_mask = os.path.join(folder_mask, file_mask)
            mask = cv2.imread(addr_mask, cv2.IMREAD_GRAYSCALE)
            mask_crop = mask[crop[0, 0]:crop[0, 1], crop[1, 0]:crop[1, 1]]
            mask_crop[mask_crop > 0] = 255  # binarize the reference mask
            # Slice number is encoded in the mask filename, shifted by the
            # per-patient offset from index.csv.
            file_num = str(int(file_mask[5:8]) - offset[i])
            file_feature = f'Features/Features_{feature_index[i]}_I{file_num}0.png'
            feature = cv2.imread(os.path.join(folder_result, file_feature), cv2.IMREAD_GRAYSCALE)
            # labels=[0, 1] keeps the matrix 2x2 even when a slice contains a
            # single class, so the 4-way unpack cannot fail.
            tn, fp, fn, tp = confusion_matrix(mask_crop.flatten() / 255,
                                              feature.flatten() / 255,
                                              labels=[0, 1]).ravel()
            data.append([addr_maskfd, file_num, tp, fp, fn, tn])
    df = pd.DataFrame(data=data,
                      columns=['PatientName', 'SliceNumber', 'TP', 'FP', 'FN', 'TN'])
    # Save result to csv file, timestamped to avoid clobbering earlier runs.
    hm = time.strftime('%H%M')
    result_date = os.path.split(path_result)[-1]
    df.to_csv(f'{path_result}/CM_{result_date}_{hm}.csv')
if __name__ == '__main__':
    # CLI entry point: python Calculate.py <path_result> <path_ref>
    if len(sys.argv) < 3:
        sys.exit('usage: Calculate.py <path_result> <path_ref>')
    make_CM(sys.argv[1], sys.argv[2])
import ConfigParser
import json
import Queue
from serenity_pypeline.logger import log
from serenity_pypeline.protos.mesos_pb2 import ResourceUsage
import serenity_pypeline.protos.serenity_pb2
from task import Task
class WorkflowNotFoundException(Exception):
    """Raised when the workflow definition cannot be loaded."""
class PipelineEngine(object):
    """Loads a JSON workflow definition and executes its tasks as a pipeline.

    The workflow JSON maps node names to {'run': ..., 'onSuccess': [...],
    'onFail': [...]} entries; each node is wrapped in a Task and wired to its
    successors before execution.
    """

    CONFIG_PATH = '/etc/serenity-pypeline/serenity-pypeline.conf'

    def __init__(self, workflow_path=None):
        """workflow_path -- optional path to the workflow JSON; falls back to
        the [workflow] 'path' option from the config file."""
        self._workflow_path = workflow_path
        self._options = self._get_options(self._get_config())
        self._workflow_json = self._get_workflow()
        self._first_task, self._workflow = self._create_workflow(
            self._workflow_json
        )
        self._initialize_objects()
        log.info(
            "Pypeline initialized with workflow: " + self._workflow_json['name']
        )

    def run(self, serializedUsage):
        """
        Runs every task from workflow
        As parameter we expect ResourceUsage.proto from mesos.proto
        :return: Status of workflow execution
        """
        usage = ResourceUsage()
        try:
            usage.ParseFromString(serializedUsage)
        except Exception as e:
            log.error("Exception occurred while parsing usage: " + str(e))
            return 1, e
        # Breadth-first execution starting from the workflow's entry task.
        queue = Queue.Queue()
        queue.put(self._first_task)
        run_res = (0, None)
        while not queue.empty():
            task = queue.get()
            try:
                # The first task has no predecessor output, so it receives the
                # parsed usage; downstream tasks receive their parent's result.
                if task.input is None:
                    result = task.run(usage=usage)
                else:
                    result = task.run(**task.input)
                for t in task.next_success:
                    t.input = result
                    queue.put(t)
            except Exception as e:
                import traceback
                log.error(traceback.format_exc())
                log.error("Exception occurred while execution tasks: " + str(e))
                run_res = (1, e)
                # Route the failure to the error branch of the workflow.
                for t in task.next_error:
                    t.input = {"error": e}
                    queue.put(t)
        return run_res

    def _get_config(self):
        """Read the pypeline ini-style configuration file."""
        config = ConfigParser.ConfigParser()
        config.read(PipelineEngine.CONFIG_PATH)
        return config

    def _get_options(self, config):
        """Return the whole config as a nested {section: {option: value}} dict."""
        result = {}
        for section in config.sections():
            result[section] = self._get_option(config, section)
        return result

    def _get_option(self, config, section):
        """Return one config section as an {option: value} dict."""
        result = {}
        for option in config.options(section):
            result[option] = config.get(section, option)
        return result

    def _get_workflow(self):
        """Load and return the workflow JSON, resolving the path from config
        when none was given; raises WorkflowNotFoundException when empty."""
        if self._workflow_path is None:
            self._workflow_path = self._options['workflow']['path']
        with open(self._workflow_path) as wf_file:
            wf = json.load(wf_file)
        if not wf:
            raise WorkflowNotFoundException("Cannot load workflow")
        return wf

    def _create_workflow(self, wf_definition):
        """Instantiate a Task for every node reachable from the start node and
        wire success/failure edges.

        :return: (start_task, annotated workflow definition dict)
        """
        wf = wf_definition['definition']
        start_point = wf[wf['start']]
        start_task = Task(start_point['run'])
        queue = Queue.Queue()
        start_point['class'] = start_task
        start_point['in_queue'] = True  # marks nodes already visited
        queue.put(start_point)
        while not queue.empty():
            task = queue.get()
            for s in task['onSuccess']:
                tmp = wf[s]
                if 'in_queue' not in tmp:
                    tmp['in_queue'] = True
                    tmp['class'] = Task(tmp['run'])
                    queue.put(tmp)
                task['class'].add_success(tmp['class'])
            for e in task['onFail']:
                tmp = wf[e]
                if 'in_queue' not in tmp:
                    tmp['in_queue'] = True
                    # fix: key was mistyped as tmp['run]'] (KeyError at runtime)
                    tmp['class'] = Task(tmp['run'])
                    queue.put(tmp)
                task['class'].add_error(tmp['class'])
        return start_task, wf

    def _initialize_objects(self):
        """Walk the task graph from the start task and initialize every Task
        that has not been initialized yet."""
        # NOTE(review): nodes reachable via multiple edges are re-enqueued;
        # is_initialized() keeps this terminating for DAGs, but a cyclic
        # workflow would loop — confirm workflows are acyclic.
        queue = Queue.Queue()
        queue.put(self._first_task)
        while not queue.empty():
            task = queue.get()
            # Plain loops instead of side-effecting list comprehensions.
            for t in task.next_error:
                queue.put(t)
            for t in task.next_success:
                queue.put(t)
            if not task.is_initialized():
                task.init_class(self._options)
import json
import Queue
from serenity_pypeline.logger import log
from serenity_pypeline.protos.mesos_pb2 import ResourceUsage
import serenity_pypeline.protos.serenity_pb2
from task import Task
class WorkflowNotFoundException(Exception):
    """Raised when the workflow definition cannot be loaded or is empty."""
class PipelineEngine(object):
    """Executes a JSON-defined workflow of tasks as a pipeline.

    The workflow file describes a graph of named nodes.  Each node is wrapped
    in a ``Task``; ``onSuccess`` edges receive the predecessor's result and
    ``onFail`` edges receive the raised error.
    """

    # Engine-wide configuration file location.
    CONFIG_PATH = '/etc/serenity-pypeline/serenity-pypeline.conf'

    def __init__(self, workflow_path=None):
        """Build and initialise the task graph.

        :param workflow_path: explicit path to the workflow JSON; when None
            the ``workflow.path`` option from the config file is used.
        """
        self._workflow_path = workflow_path
        self._options = self._get_options(self._get_config())
        self._workflow_json = self._get_workflow()
        self._first_task, self._workflow = self._create_workflow(
            self._workflow_json
        )
        self._initialize_objects()
        log.info(
            "Pypeline initialized with workflow: " + self._workflow_json['name']
        )

    def run(self, serializedUsage):
        """Run every task from the workflow.

        :param serializedUsage: serialized mesos ``ResourceUsage`` protobuf
        :return: tuple ``(status, error)`` — ``(0, None)`` on success,
            ``(1, exception)`` when parsing or any task failed
        """
        usage = ResourceUsage()
        try:
            usage.ParseFromString(serializedUsage)
        except Exception as e:
            log.error("Exception occurred while parsing usage: " + str(e))
            return 1, e
        queue = Queue.Queue()
        queue.put(self._first_task)
        run_res = (0, None)
        while not queue.empty():
            task = queue.get()
            try:
                # The entry task receives the parsed usage; every other task
                # receives its predecessor's result as keyword arguments.
                if task.input is None:
                    result = task.run(usage=usage)
                else:
                    result = task.run(**task.input)
                for t in task.next_success:
                    t.input = result
                    queue.put(t)
            except Exception as e:
                import traceback
                log.error(traceback.format_exc())
                log.error("Exception occurred while execution tasks: " + str(e))
                run_res = (1, e)
                for t in task.next_error:
                    t.input = {"error": e}
                    queue.put(t)
        return run_res

    def _get_config(self):
        """Read and return the engine ConfigParser from ``CONFIG_PATH``."""
        config = ConfigParser.ConfigParser()
        config.read(PipelineEngine.CONFIG_PATH)
        return config

    def _get_options(self, config):
        """Flatten a ConfigParser into ``{section: {option: value}}``."""
        result = {}
        for section in config.sections():
            result[section] = self._get_option(config, section)
        return result

    def _get_option(self, config, section):
        """Return one config section as an ``{option: value}`` dict."""
        result = {}
        for option in config.options(section):
            result[option] = config.get(section, option)
        return result

    def _get_workflow(self):
        """Load the workflow JSON, defaulting to the configured path.

        :raises WorkflowNotFoundException: when the document is empty
        """
        if self._workflow_path is None:
            self._workflow_path = self._options['workflow']['path']
        with open(self._workflow_path) as wf_file:
            wf = json.load(wf_file)
        if not wf:
            raise WorkflowNotFoundException("Cannot load workflow")
        return wf

    def _create_workflow(self, wf_definition):
        """Translate the workflow definition into a linked ``Task`` graph.

        :param wf_definition: parsed workflow JSON with a ``definition`` map
        :return: tuple of (entry Task, node mapping)
        """
        wf = wf_definition['definition']
        start_point = wf[wf['start']]
        start_task = Task(start_point['run'])
        queue = Queue.Queue()
        start_point['class'] = start_task
        start_point['in_queue'] = True  # node already scheduled once
        queue.put(start_point)
        while not queue.empty():
            task = queue.get()
            # Terminal nodes may omit onSuccess/onFail; treat as empty.
            for s in task.get('onSuccess', []):
                tmp = wf[s]
                if 'in_queue' not in tmp:
                    tmp['in_queue'] = True
                    tmp['class'] = Task(tmp['run'])
                    queue.put(tmp)
                task['class'].add_success(tmp['class'])
            for e in task.get('onFail', []):
                tmp = wf[e]
                if 'in_queue' not in tmp:
                    tmp['in_queue'] = True
                    # BUG FIX: original used the typo'd key tmp['run]'].
                    tmp['class'] = Task(tmp['run'])
                    queue.put(tmp)
                task['class'].add_error(tmp['class'])
        return start_task, wf

    def _initialize_objects(self):
        """Initialise every task in the graph exactly once."""
        queue = Queue.Queue()
        queue.put(self._first_task)
        while not queue.empty():
            task = queue.get()
            for t in task.next_error:
                queue.put(t)
            for t in task.next_success:
                queue.put(t)
            if not task.is_initialized():
                task.init_class(self._options)
import os
import logging
import json
import feedparser
import requests
from datetime import datetime
from collections import namedtuple
from bs4 import BeautifulSoup
from fpdf import FPDF
class RssParser:
"""
Class to parse RSS-news
"""
def __init__(self, url: str, limit: int, verbose: bool, date: str, html_path: str, pdf_path: str):
    """Set up reader state and working folders.

    :param url: rss-feed to be parsed
    :param limit: number of news to be printed (non-positive means "all")
    :param verbose: flag of verbosity
    :param date: date (YYYYMMDD) used to filter cached news
    :param html_path: output path for HTML conversion ("default" for data dir)
    :param pdf_path: output path for PDF conversion ("default" for data dir)
    """
    self.url = url
    self.limit = limit
    self.verbose = verbose
    self.date = date
    self.feed = ''
    self.news = []
    # Record shapes used for parsed feed entries.
    self.link_data = namedtuple('link', 'id url type')
    self.image_data = namedtuple('image', 'id url type alt')
    self.article = namedtuple('article', 'title date url description links')
    if self.verbose:
        self.logger = self.create_logger('rss-parser')
        self.logger.info('logging enabled')
    # Working folders live next to this module.
    self.data_path = self.create_folder(os.path.dirname(__file__), 'data')
    self.img_path = self.create_folder(self.data_path, 'images')
    self.html_path = html_path
    self.pdf_path = pdf_path
    if self.verbose:
        self.logger.info('RssReader object was initialized successfully')
def parse_rss(self):
    """Fetch the RSS feed from ``self.url`` and populate ``self.news``.

    :raises ValueError: when the feed cannot be fetched or parsed
    """
    rss_feed = feedparser.parse(self.url)
    # feedparser sets 'bozo' when the document could not be retrieved/parsed.
    if rss_feed['bozo']:
        raise ValueError("Wrong URL address or Internet access is unavailable")
    if self.verbose:
        self.logger.info('Source feed was received')
    self.feed = rss_feed['feed']['title']
    entries = rss_feed.entries
    if self.limit > 0:
        entries = entries[:self.limit]
        if self.verbose:
            self.logger.info(f'News number in feed was cropped down to {self.limit} news')
    for entry in entries:
        self.news.append(self.create_article(entry))
    if self.verbose:
        # BUG FIX: previously logged self.limit even when no limit was set
        # or fewer entries were available than the limit.
        self.logger.info(f'{len(self.news)} news have been fetched from source')
def parse_rss_link(self, entry_link: dict, link_id: int, link_type: str) -> namedtuple:
"""
This function parses link (link or image) and creates link or image data object (namedtuple)
:param entry_link: link to be parsed
:param link_id: link id in list of links
:param link_type: image or just a link
:return: parsed_link - link or image date object (namedtuple)
"""
if link_type == 'link':
link_url = entry_link['href']
parsed_link = self.link_data(link_id, link_url, 'link')
else:
image_alt = entry_link['alt']
image_url = entry_link['src']
parsed_link = self.image_data(link_id, image_url, 'image', image_alt)
return parsed_link
def create_article(self, entry: dict):
    """Parse one raw feed entry into an article namedtuple.

    :param entry: feedparser entry dict
    :return: article namedtuple (title, date, url, description, links)
    """
    # NOTE(review): the original source was garbled at this replace() call;
    # reconstructed as unescaping the HTML apostrophe entity ('&#39;') —
    # confirm against the upstream repository.
    title = entry.get('title').replace('&#39;', "'")
    date = entry.get('published')
    url = entry.get('link')
    links = []
    soup = BeautifulSoup(entry['summary_detail']['value'], features='html.parser')
    for anchor in soup.findAll('a'):
        links.append(self.parse_rss_link(anchor, len(links), 'link'))
    for image in soup.findAll('img'):
        links.append(self.parse_rss_link(image, len(links), 'image'))
    description = soup.text.replace('&#39;', "'")
    return self.article(title, date, url, description, links)
def parse_json_cache(self):
"""
This function parses json cache from cache json file
:return: None
"""
cache_file_path = os.path.join(self.data_path, "news_cache.json")
if os.path.exists(cache_file_path) and os.path.getsize(cache_file_path) > 0:
with open(cache_file_path, 'r') as cache_file:
json_cache = json.load(cache_file)
if self.verbose:
self.logger.info(f'News are getting fetched from local cache. '
f'Path to cache file: {cache_file_path}')
for feed_instance in json_cache['news']:
if feed_instance['url'] == self.url:
self.feed = feed_instance['feed']
cached_news = feed_instance['news_objects']
for article in cached_news:
my_article = self.create_cached_article(article)
my_article_date_string = self.format_date_string(article['date'])
if my_article_date_string == self.date:
self.news.append(my_article)
if self.limit > 0:
self.news = self.news[:self.limit]
cached_news_count = self.limit if self.limit >= len(cached_news) else len(cached_news)
total_cached_news = 0
for feed in json_cache['news']:
total_cached_news += len(feed['news_objects'])
if self.verbose:
self.logger.info(f'{cached_news_count} news have been fetched from local cache')
self.logger.info(f'{total_cached_news} news are in the local cache now')
else:
print('rss-reader: info : Parse some online news first so there will be something to read from cache')
exit()
@staticmethod
def format_date_string(date: str) -> str:
"""
This function converts time strings to %Y%m%d format to compare date of article with input
:param date:
:return: my_article_date_string - converted date string
"""
if any(char in date for char in ('+', '-')):
my_article_date_obj = datetime.strptime(date, '%a, %d %b %Y %H:%M:%S %z')
else:
my_article_date_obj = datetime.strptime(date, '%a, %d %b %Y %H:%M:%S %Z')
my_article_date_string = datetime.strftime(my_article_date_obj, '%Y%m%d')
return my_article_date_string
def parse_cached_link(self, link: dict) -> namedtuple:
"""
This function parses cached link and creates link or image data object (namedtuple) from it
:param link: link to be parsed
:return: parsed_link - link or image data object (namedtuple)
"""
if link['type'] == 'image':
link_id = link['id']
image_url = link['url']
link_type = link['type']
image_alt = link['alt']
parsed_link = self.image_data(link_id, image_url, link_type, image_alt)
else:
link_id = link['id']
link_url = link['url']
link_type = link['type']
parsed_link = self.link_data(link_id, link_url, link_type)
return parsed_link
def create_cached_article(self, article: dict) -> namedtuple:
"""
This function parses cached article and creates article data object (namedtuple) from it
:param article: article to be parsed
:return: parsed_article - article data object (namedtuple)
"""
parsed_links = []
for link in article['links']:
my_link = self.parse_cached_link(link)
parsed_links.append(my_link)
title = article['title']
date = article['date']
url = article['url']
description = article['description']
links = parsed_links
parsed_article = self.article(title, date, url, description, links)
return parsed_article
def feed_to_json(self):
"""
This function converts current feed to JSON format
:return: None
"""
article_list = []
for article in self.news:
my_article_dict = self.article_to_json(article)
article_list.append(my_article_dict)
if self.verbose:
self.logger.info('Feed was converted to JSON format')
return {'feed': self.feed, 'url': self.url, 'news_objects': article_list}
def article_to_json(self, article: namedtuple) -> dict:
"""
This function converts article to JSON format
:param article: article to be converted
:return: json_article_dict - article in JSON dictionary format
"""
links_list = []
for link in article.links:
my_json_link = self.link_to_json(link)
links_list.append(my_json_link)
json_article_dict = dict(zip(('title', 'date', 'url', 'description', 'links'),
(article.title, article.date, article.url, article.description, links_list)))
return json_article_dict
@staticmethod
def link_to_json(link: namedtuple) -> dict:
"""
This function converts link to JSON format
:param link:
:return: json_link_dict - link in JSON dictionary format
"""
if link.type == 'link':
json_link_dict = dict(zip(('id', 'url', 'type'), (link.id, link.url, link.type)))
else:
json_link_dict = dict(zip(('id', 'url', 'type', 'alt'), (link.id, link.url, link.type, link.alt)))
return json_link_dict
def feed_to_string(self):
    """
    This function converts current feed to string to be printed out
    :return: result_string - string containing news to be printed in human-readable format
    """
    if len(self.news) == 0:
        return 'No news for that day, try another'
    else:
        result_string = ''
        result_string += f'\nFeed: {self.feed}\n\n'
        for article in self.news:
            result_string += f'Title: {article.title}\nDate: {article.date}\nUrl: {article.url}\n\n'
            # NOTE(review): the description is emitted only after the first
            # image link; articles without any image link never get their
            # description printed — confirm whether that is intended.
            for link in article.links:
                if link.type == 'image':
                    result_string += f'[image {link.id + 1} : {link.alt}][{link.id + 1}]'
                    result_string += f'{article.description}\n\n'
                    break
            result_string += f'Links:\n'
            for link in article.links:
                if link.type == 'image':
                    if link.url:
                        result_string += f'[{link.id + 1}]: {link.url} ({link.type})\n'
                    else:
                        # Images without a usable URL fall back to their alt text.
                        result_string += f'[{link.id + 1}]: {link.alt} (invalid url or no image)({link.type})\n'
                else:
                    result_string += f'[{link.id + 1}]: {link.url} ({link.type})\n'
            result_string += f'\n'
        if self.verbose:
            self.logger.info('Feed was converted to text format')
        return result_string
def feed_to_html(self):
    """
    This function converts current feed to string to be written to HTML file
    :return: result_string - string containing news to be written to HTML file
    """
    result_string = ''
    result_string += f'<!DOCTYPE html><html><title>rss-feed</title>'
    result_string += f'<body><h3>Feed: {self.feed}</h3>'
    for article in self.news:
        result_string += f'<h4 style="display:inline">Title:</h4><span> {article.title}</span><br>' \
                         f'<h4 style="display:inline">Date:</h4><span> {article.date}</span><br>' \
                         f'<h4 style="display:inline">Url:</h4><span> {article.url}</span><br><br>'
        # NOTE(review): the description is only emitted after the first image
        # link; articles without images lose their description — confirm.
        for link in article.links:
            if link.type == 'image':
                result_string += f'<img src="{link.url}" width="10%"><br><br>'
                result_string += f'<span>{article.description}</span><br><br>'
                break
        result_string += f'<span>Links:</span><br>'
        for link in article.links:
            if link.type == 'image':
                if link.url:
                    result_string += f'<span>[{link.id + 1}]: </span>' \
                                     f'<a href="{link.url}">{link.alt}({link.type})</a><br>'
                else:
                    # Images without a usable URL are rendered as plain text.
                    result_string += f'<span>[{link.id + 1}]: </span>' \
                                     f'<span>{link.alt}(invalid url or no image)({link.type})</span><br>'
            else:
                result_string += f'<span>[{link.id + 1}]: </span>' \
                                 f'<a href="{link.url}">{link.url}({link.type})</a><br>'
    result_string += f'</body></html><br>'
    if self.verbose:
        self.logger.info('Feed was converted to HTML format')
    return result_string
def feed_to_pdf(self):
    """
    This function converts current feed to PDF document
    :return: pdf - PDF document containing news feed
    """
    pdf = FPDF()
    pdf.add_page()
    # Bundled DejaVu font gives Unicode coverage the default fpdf fonts lack.
    font_path = os.path.join(os.path.dirname(__file__), 'fonts', 'ttf', 'DejaVuSerifCondensed.ttf')
    pdf.add_font('DejaVu', '', font_path, uni=True)
    pdf.set_font('DejaVu', '', 14)
    pdf.set_margins(10, 10, 5)
    pdf.cell(w=0, h=5, txt=self.feed)
    pdf.ln()
    pdf.ln()
    for article in self.news:
        pdf.set_font_size(12)
        pdf.multi_cell(w=0, h=5, txt=f'Title: {article.title}')
        pdf.multi_cell(w=0, h=5, txt=f'Date: {article.date}')
        pdf.multi_cell(w=0, h=5, txt=f'Url: {article.url}')
        pdf.ln()
        # NOTE(review): downloads the article's images over the network
        # while rendering — rendering fails offline; confirm acceptable.
        images = self.download_images(article, self.img_path, self.news.index(article))
        if len(images):
            if images[0]:
                pdf.image(images[0], w=30)
                pdf.ln()
        pdf.multi_cell(w=0, h=5, txt=article.description)
        pdf.ln()
        pdf.cell(w=0, h=5, txt=f'Links:')
        pdf.ln()
        for link in article.links:
            if link.type == 'image':
                if link.url:
                    pdf.multi_cell(w=0, h=5, txt=f'[{link.id + 1}]: {link.url} ({link.type})')
                else:
                    pdf.multi_cell(w=0, h=5, txt=f'[{link.id + 1}]: {link.alt} (invalid url or no image)'
                                                 f'({link.type})')
            else:
                pdf.multi_cell(w=0, h=5, txt=f'[{link.id + 1}]: {link.url} ({link.type})')
        pdf.ln()
        pdf.ln()
    if self.verbose:
        self.logger.info('Feed was converted to PDF format')
    return pdf
def cache_feed_to_json_file(self):
    """
    This function caches current feed to cache .json file
    :return: None
    """
    cache_file_path = os.path.join(self.data_path, "news_cache.json")
    if not os.path.exists(cache_file_path):
        cache_file = open(cache_file_path, 'w+')
        cache_file.close()
        if self.verbose:
            self.logger.info(f'News cache has been created. '
                             f'Path to cache file: {cache_file_path}')
    json_feed = self.feed_to_json()
    if os.path.getsize(cache_file_path) > 0:
        # Existing cache: merge the current feed in, skipping duplicates.
        with open(cache_file_path, 'r') as cache_file:
            json_cache = json.load(cache_file)
        found = False
        for feed in json_cache['news']:
            if feed['url'] == self.url:
                found = True
                cached_news = 0
                # Append only articles not already present for this URL.
                for news in json_feed['news_objects']:
                    if news not in feed['news_objects']:
                        feed['news_objects'].append(news)
                        cached_news += 1
        if not found:
            # First time this URL is cached: store the whole feed.
            json_cache['news'].append(json_feed)
            cached_news = len(json_feed['news_objects'])
        total_cached_news = 0
        for feed in json_cache['news']:
            total_cached_news += len(feed['news_objects'])
        with open(cache_file_path, 'w') as cache_file:
            json.dump(json_cache, cache_file)
    else:
        # Empty/new cache file: write the initial structure.
        with open(cache_file_path, 'w') as cache_file:
            json_file_format = {'news': [json_feed]}
            json.dump(json_file_format, cache_file)
        cached_news = total_cached_news = len(json_feed['news_objects'])
    if self.verbose:
        self.logger.info(f'{cached_news} online news have been saved in local cache (duplicates were removed)')
        self.logger.info(f'{total_cached_news} online news are cached in the file now')
def cache_feed_to_html_file(self):
"""
This function caches current feed to cache HTML file
:return: None
"""
if self.html_path == "default":
cache_file_path = os.path.join(self.data_path, 'news_cache.html')
else:
if self.html_path == os.path.abspath(self.html_path):
cache_file_path = self.html_path
else:
cache_file_path = os.path.join(os.getcwd(), self.html_path)
if not os.path.exists(cache_file_path):
html_cache_file = open(cache_file_path, "w+")
html_cache_file.close()
if os.path.isfile(cache_file_path):
with open(cache_file_path, 'w+') as cache_file:
cache_file.write(self.feed_to_html())
if self.verbose:
self.logger.info(f'News have been cached to HTML file. Path to file: {cache_file_path}')
def cache_feed_to_pdf_file(self):
    """Render the current feed to a PDF file.

    Uses the data folder when ``pdf_path`` is "default"; otherwise resolves
    relative paths against the current working directory.
    """
    if self.pdf_path == "default":
        cache_file_path = os.path.join(self.data_path, 'news_cache.pdf')
    elif self.pdf_path == os.path.abspath(self.pdf_path):
        cache_file_path = self.pdf_path
    else:
        cache_file_path = os.path.join(os.getcwd(), self.pdf_path)
    if not os.path.exists(cache_file_path):
        open(cache_file_path, "w+").close()
    pdf = self.feed_to_pdf()
    if os.path.isfile(cache_file_path):
        pdf.output(cache_file_path)
    if self.verbose:
        self.logger.info(f'News have been cached to PDF file. Path to file: {cache_file_path}')
@staticmethod
def create_logger(logging_module: str):
"""
This function creates logger
:param logging_module: logging module to be used
:return: logger - logger for current module
"""
logger = logging.getLogger(logging_module)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s')
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
return logger
def create_folder(self, path: str, folder_name: str) -> str:
"""
This function creates new folder
:param path: path where new folder will be created
:param folder_name: name of new folder
:return: new_folder_path - path to created folder
"""
if os.path.exists(path):
new_folder_path = os.path.join(path, folder_name)
if not os.path.exists(new_folder_path):
os.mkdir(new_folder_path)
if self.verbose:
self.logger.info(f'New folder was created. Path to folder: {new_folder_path}')
return new_folder_path
@staticmethod
def download_content_from_url(dest: str, source: str, name: str) -> str:
    """
    This function downloads file from URL
    :param dest: folder to save file
    :param source: url to file
    :param name: name of downloaded file
    :return: path_to_file - path to downloaded file
    """
    path_to_file = os.path.join(dest, name)
    # NOTE(review): no timeout and no status-code check — requests.get can
    # block indefinitely or save an error page as the file; consider
    # hardening with timeout= and raise_for_status().
    resource = requests.get(source)
    with open(path_to_file, 'wb') as content_file:
        content_file.write(resource.content)
    return path_to_file
def download_images(self, article: namedtuple, path: str, article_index: int) -> list:
"""
:param article: article from which images are downloaded
:param path: path to store downloaded images
:param article_index: article index in feed list
:return: images - list of images paths in local storage
"""
images = []
image_index = 0
for link in article.links:
if link.type == 'image':
if link.url:
image_path = self.download_content_from_url(path, link.url, f'{article_index}_{image_index}.jpg')
images.append(image_path)
image_index += 1
return images | rss_parser.py | import os
import logging
import json
import feedparser
import requests
from datetime import datetime
from collections import namedtuple
from bs4 import BeautifulSoup
from fpdf import FPDF
class RssParser:
"""
Class to parse RSS-news
"""
def __init__(self, url: str, limit: int, verbose: bool, date: str, html_path: str, pdf_path: str):
"""
This function initializes the RssParser instance
:param url: rss-feed to be parsed
:param limit: number of news to be printed
:param verbose: flag of verbosity
:param date: date to print news of the specified day
:return: None
"""
self.url = url
self.limit = limit
self.feed = ''
self.news = []
self.verbose = verbose
self.date = date
self.link_data = namedtuple('link', 'id url type')
self.image_data = namedtuple('image', 'id url type alt')
self.article = namedtuple('article', 'title date url description links')
if self.verbose:
self.logger = self.create_logger('rss-parser')
self.logger.info('logging enabled')
self.data_path = self.create_folder(os.path.dirname(__file__), 'data')
self.img_path = self.create_folder(self.data_path, 'images')
self.html_path = html_path
self.pdf_path = pdf_path
if self.verbose:
self.logger.info('RssReader object was initialized successfully')
def parse_rss(self):
    """Fetch the RSS feed from ``self.url`` and populate ``self.news``.

    :raises ValueError: when the feed cannot be fetched or parsed
    """
    rss_feed = feedparser.parse(self.url)
    # feedparser sets 'bozo' when the document could not be retrieved/parsed.
    if rss_feed['bozo']:
        raise ValueError("Wrong URL address or Internet access is unavailable")
    if self.verbose:
        self.logger.info('Source feed was received')
    self.feed = rss_feed['feed']['title']
    entries = rss_feed.entries
    if self.limit > 0:
        entries = entries[:self.limit]
        if self.verbose:
            self.logger.info(f'News number in feed was cropped down to {self.limit} news')
    for entry in entries:
        self.news.append(self.create_article(entry))
    if self.verbose:
        # BUG FIX: previously logged self.limit even when no limit was set
        # or fewer entries were available than the limit.
        self.logger.info(f'{len(self.news)} news have been fetched from source')
def parse_rss_link(self, entry_link: dict, link_id: int, link_type: str) -> namedtuple:
"""
This function parses link (link or image) and creates link or image data object (namedtuple)
:param entry_link: link to be parsed
:param link_id: link id in list of links
:param link_type: image or just a link
:return: parsed_link - link or image date object (namedtuple)
"""
if link_type == 'link':
link_url = entry_link['href']
parsed_link = self.link_data(link_id, link_url, 'link')
else:
image_alt = entry_link['alt']
image_url = entry_link['src']
parsed_link = self.image_data(link_id, image_url, 'image', image_alt)
return parsed_link
def create_article(self, entry: dict):
    """Parse one raw feed entry into an article namedtuple.

    :param entry: feedparser entry dict
    :return: article namedtuple (title, date, url, description, links)
    """
    # NOTE(review): the original source was garbled at this replace() call;
    # reconstructed as unescaping the HTML apostrophe entity ('&#39;') —
    # confirm against the upstream repository.
    title = entry.get('title').replace('&#39;', "'")
    date = entry.get('published')
    url = entry.get('link')
    links = []
    soup = BeautifulSoup(entry['summary_detail']['value'], features='html.parser')
    for anchor in soup.findAll('a'):
        links.append(self.parse_rss_link(anchor, len(links), 'link'))
    for image in soup.findAll('img'):
        links.append(self.parse_rss_link(image, len(links), 'image'))
    description = soup.text.replace('&#39;', "'")
    return self.article(title, date, url, description, links)
def parse_json_cache(self):
"""
This function parses json cache from cache json file
:return: None
"""
cache_file_path = os.path.join(self.data_path, "news_cache.json")
if os.path.exists(cache_file_path) and os.path.getsize(cache_file_path) > 0:
with open(cache_file_path, 'r') as cache_file:
json_cache = json.load(cache_file)
if self.verbose:
self.logger.info(f'News are getting fetched from local cache. '
f'Path to cache file: {cache_file_path}')
for feed_instance in json_cache['news']:
if feed_instance['url'] == self.url:
self.feed = feed_instance['feed']
cached_news = feed_instance['news_objects']
for article in cached_news:
my_article = self.create_cached_article(article)
my_article_date_string = self.format_date_string(article['date'])
if my_article_date_string == self.date:
self.news.append(my_article)
if self.limit > 0:
self.news = self.news[:self.limit]
cached_news_count = self.limit if self.limit >= len(cached_news) else len(cached_news)
total_cached_news = 0
for feed in json_cache['news']:
total_cached_news += len(feed['news_objects'])
if self.verbose:
self.logger.info(f'{cached_news_count} news have been fetched from local cache')
self.logger.info(f'{total_cached_news} news are in the local cache now')
else:
print('rss-reader: info : Parse some online news first so there will be something to read from cache')
exit()
@staticmethod
def format_date_string(date: str) -> str:
"""
This function converts time strings to %Y%m%d format to compare date of article with input
:param date:
:return: my_article_date_string - converted date string
"""
if any(char in date for char in ('+', '-')):
my_article_date_obj = datetime.strptime(date, '%a, %d %b %Y %H:%M:%S %z')
else:
my_article_date_obj = datetime.strptime(date, '%a, %d %b %Y %H:%M:%S %Z')
my_article_date_string = datetime.strftime(my_article_date_obj, '%Y%m%d')
return my_article_date_string
def parse_cached_link(self, link: dict) -> namedtuple:
"""
This function parses cached link and creates link or image data object (namedtuple) from it
:param link: link to be parsed
:return: parsed_link - link or image data object (namedtuple)
"""
if link['type'] == 'image':
link_id = link['id']
image_url = link['url']
link_type = link['type']
image_alt = link['alt']
parsed_link = self.image_data(link_id, image_url, link_type, image_alt)
else:
link_id = link['id']
link_url = link['url']
link_type = link['type']
parsed_link = self.link_data(link_id, link_url, link_type)
return parsed_link
def create_cached_article(self, article: dict) -> namedtuple:
"""
This function parses cached article and creates article data object (namedtuple) from it
:param article: article to be parsed
:return: parsed_article - article data object (namedtuple)
"""
parsed_links = []
for link in article['links']:
my_link = self.parse_cached_link(link)
parsed_links.append(my_link)
title = article['title']
date = article['date']
url = article['url']
description = article['description']
links = parsed_links
parsed_article = self.article(title, date, url, description, links)
return parsed_article
def feed_to_json(self):
"""
This function converts current feed to JSON format
:return: None
"""
article_list = []
for article in self.news:
my_article_dict = self.article_to_json(article)
article_list.append(my_article_dict)
if self.verbose:
self.logger.info('Feed was converted to JSON format')
return {'feed': self.feed, 'url': self.url, 'news_objects': article_list}
def article_to_json(self, article: namedtuple) -> dict:
"""
This function converts article to JSON format
:param article: article to be converted
:return: json_article_dict - article in JSON dictionary format
"""
links_list = []
for link in article.links:
my_json_link = self.link_to_json(link)
links_list.append(my_json_link)
json_article_dict = dict(zip(('title', 'date', 'url', 'description', 'links'),
(article.title, article.date, article.url, article.description, links_list)))
return json_article_dict
@staticmethod
def link_to_json(link: namedtuple) -> dict:
"""
This function converts link to JSON format
:param link:
:return: json_link_dict - link in JSON dictionary format
"""
if link.type == 'link':
json_link_dict = dict(zip(('id', 'url', 'type'), (link.id, link.url, link.type)))
else:
json_link_dict = dict(zip(('id', 'url', 'type', 'alt'), (link.id, link.url, link.type, link.alt)))
return json_link_dict
def feed_to_string(self):
"""
This function converts current feed to string to be printed out
:return: result_string - string containing news to be printed in human-readable format
"""
if len(self.news) == 0:
return 'No news for that day, try another'
else:
result_string = ''
result_string += f'\nFeed: {self.feed}\n\n'
for article in self.news:
result_string += f'Title: {article.title}\nDate: {article.date}\nUrl: {article.url}\n\n'
for link in article.links:
if link.type == 'image':
result_string += f'[image {link.id + 1} : {link.alt}][{link.id + 1}]'
result_string += f'{article.description}\n\n'
break
result_string += f'Links:\n'
for link in article.links:
if link.type == 'image':
if link.url:
result_string += f'[{link.id + 1}]: {link.url} ({link.type})\n'
else:
result_string += f'[{link.id + 1}]: {link.alt} (invalid url or no image)({link.type})\n'
else:
result_string += f'[{link.id + 1}]: {link.url} ({link.type})\n'
result_string += f'\n'
if self.verbose:
self.logger.info('Feed was converted to text format')
return result_string
def feed_to_html(self):
"""
This function converts current feed to string to be written to HTML file
:return: result_string - string containing news to be written to HTML file
"""
result_string = ''
result_string += f'<!DOCTYPE html><html><title>rss-feed</title>'
result_string += f'<body><h3>Feed: {self.feed}</h3>'
for article in self.news:
result_string += f'<h4 style="display:inline">Title:</h4><span> {article.title}</span><br>' \
f'<h4 style="display:inline">Date:</h4><span> {article.date}</span><br>' \
f'<h4 style="display:inline">Url:</h4><span> {article.url}</span><br><br>'
for link in article.links:
if link.type == 'image':
result_string += f'<img src="{link.url}" width="10%"><br><br>'
result_string += f'<span>{article.description}</span><br><br>'
break
result_string += f'<span>Links:</span><br>'
for link in article.links:
if link.type == 'image':
if link.url:
result_string += f'<span>[{link.id + 1}]: </span>' \
f'<a href="{link.url}">{link.alt}({link.type})</a><br>'
else:
result_string += f'<span>[{link.id + 1}]: </span>' \
f'<span>{link.alt}(invalid url or no image)({link.type})</span><br>'
else:
result_string += f'<span>[{link.id + 1}]: </span>' \
f'<a href="{link.url}">{link.url}({link.type})</a><br>'
result_string += f'</body></html><br>'
if self.verbose:
self.logger.info('Feed was converted to HTML format')
return result_string
def feed_to_pdf(self):
"""
This function converts current feed to PDF document
:return: pdf - PDF document containing news feed
"""
pdf = FPDF()
pdf.add_page()
font_path = os.path.join(os.path.dirname(__file__), 'fonts', 'ttf', 'DejaVuSerifCondensed.ttf')
pdf.add_font('DejaVu', '', font_path, uni=True)
pdf.set_font('DejaVu', '', 14)
pdf.set_margins(10, 10, 5)
pdf.cell(w=0, h=5, txt=self.feed)
pdf.ln()
pdf.ln()
for article in self.news:
pdf.set_font_size(12)
pdf.multi_cell(w=0, h=5, txt=f'Title: {article.title}')
pdf.multi_cell(w=0, h=5, txt=f'Date: {article.date}')
pdf.multi_cell(w=0, h=5, txt=f'Url: {article.url}')
pdf.ln()
images = self.download_images(article, self.img_path, self.news.index(article))
if len(images):
if images[0]:
pdf.image(images[0], w=30)
pdf.ln()
pdf.multi_cell(w=0, h=5, txt=article.description)
pdf.ln()
pdf.cell(w=0, h=5, txt=f'Links:')
pdf.ln()
for link in article.links:
if link.type == 'image':
if link.url:
pdf.multi_cell(w=0, h=5, txt=f'[{link.id + 1}]: {link.url} ({link.type})')
else:
pdf.multi_cell(w=0, h=5, txt=f'[{link.id + 1}]: {link.alt} (invalid url or no image)'
f'({link.type})')
else:
pdf.multi_cell(w=0, h=5, txt=f'[{link.id + 1}]: {link.url} ({link.type})')
pdf.ln()
pdf.ln()
if self.verbose:
self.logger.info('Feed was converted to PDF format')
return pdf
def cache_feed_to_json_file(self):
"""
This function caches current feed to cache .json file
:return: None
"""
cache_file_path = os.path.join(self.data_path, "news_cache.json")
if not os.path.exists(cache_file_path):
cache_file = open(cache_file_path, 'w+')
cache_file.close()
if self.verbose:
self.logger.info(f'News cache has been created. '
f'Path to cache file: {cache_file_path}')
json_feed = self.feed_to_json()
if os.path.getsize(cache_file_path) > 0:
with open(cache_file_path, 'r') as cache_file:
json_cache = json.load(cache_file)
found = False
for feed in json_cache['news']:
if feed['url'] == self.url:
found = True
cached_news = 0
for news in json_feed['news_objects']:
if news not in feed['news_objects']:
feed['news_objects'].append(news)
cached_news += 1
if not found:
json_cache['news'].append(json_feed)
cached_news = len(json_feed['news_objects'])
total_cached_news = 0
for feed in json_cache['news']:
total_cached_news += len(feed['news_objects'])
with open(cache_file_path, 'w') as cache_file:
json.dump(json_cache, cache_file)
else:
with open(cache_file_path, 'w') as cache_file:
json_file_format = {'news': [json_feed]}
json.dump(json_file_format, cache_file)
cached_news = total_cached_news = len(json_feed['news_objects'])
if self.verbose:
self.logger.info(f'{cached_news} online news have been saved in local cache (duplicates were removed)')
self.logger.info(f'{total_cached_news} online news are cached in the file now')
def cache_feed_to_html_file(self):
"""
This function caches current feed to cache HTML file
:return: None
"""
if self.html_path == "default":
cache_file_path = os.path.join(self.data_path, 'news_cache.html')
else:
if self.html_path == os.path.abspath(self.html_path):
cache_file_path = self.html_path
else:
cache_file_path = os.path.join(os.getcwd(), self.html_path)
if not os.path.exists(cache_file_path):
html_cache_file = open(cache_file_path, "w+")
html_cache_file.close()
if os.path.isfile(cache_file_path):
with open(cache_file_path, 'w+') as cache_file:
cache_file.write(self.feed_to_html())
if self.verbose:
self.logger.info(f'News have been cached to HTML file. Path to file: {cache_file_path}')
def cache_feed_to_pdf_file(self):
"""
This function caches current feed to cache PDF file
:return: None
"""
if self.pdf_path == "default":
cache_file_path = os.path.join(self.data_path, 'news_cache.pdf')
else:
if self.pdf_path == os.path.abspath(self.pdf_path):
cache_file_path = self.pdf_path
else:
cache_file_path = os.path.join(os.getcwd(), self.pdf_path)
if not os.path.exists(cache_file_path):
pdf_cache_file = open(cache_file_path, "w+")
pdf_cache_file.close()
pdf = self.feed_to_pdf()
if os.path.isfile(cache_file_path):
pdf.output(cache_file_path)
if self.verbose:
self.logger.info(f'News have been cached to PDF file. Path to file: {cache_file_path}')
@staticmethod
def create_logger(logging_module: str):
"""
This function creates logger
:param logging_module: logging module to be used
:return: logger - logger for current module
"""
logger = logging.getLogger(logging_module)
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(funcName)s - %(levelname)s - %(message)s')
stream_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
return logger
def create_folder(self, path: str, folder_name: str) -> str:
"""
This function creates new folder
:param path: path where new folder will be created
:param folder_name: name of new folder
:return: new_folder_path - path to created folder
"""
if os.path.exists(path):
new_folder_path = os.path.join(path, folder_name)
if not os.path.exists(new_folder_path):
os.mkdir(new_folder_path)
if self.verbose:
self.logger.info(f'New folder was created. Path to folder: {new_folder_path}')
return new_folder_path
@staticmethod
def download_content_from_url(dest: str, source: str, name: str) -> str:
"""
This function downloads file from URL
:param dest: folder to save file
:param source: url to file
:param name: name of downloaded file
:return: path_to_file - path to downloaded file
"""
path_to_file = os.path.join(dest, name)
resource = requests.get(source)
with open(path_to_file, 'wb') as content_file:
content_file.write(resource.content)
return path_to_file
def download_images(self, article: namedtuple, path: str, article_index: int) -> list:
"""
:param article: article from which images are downloaded
:param path: path to store downloaded images
:param article_index: article index in feed list
:return: images - list of images paths in local storage
"""
images = []
image_index = 0
for link in article.links:
if link.type == 'image':
if link.url:
image_path = self.download_content_from_url(path, link.url, f'{article_index}_{image_index}.jpg')
images.append(image_path)
image_index += 1
return images | 0.507812 | 0.102709 |
import json
import random
import multiprocessing
import time
#Local utils
from utils.messaging import PanMessaging
#msg_subscriber = PanMessaging.create_subscriber(6511)
def create_forwarder(port):
try:
PanMessaging.create_forwarder(port, port + 1)
except Exception:
pass
msg_forwarder_process = multiprocessing.Process(
target=create_forwarder, args=(
6510,), name='MsgForwarder')
msg_forwarder_process.start()
msg_publisher = PanMessaging.create_publisher(6510)
sample_msgs = [
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 55.118,
"current_ha": 1.021,
"current_ra": 15.314,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": True
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": True,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": True,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.592,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:06",
"localtime": "2020-07-07 18:40:03.519329+02:00",
"siderealtime": "12h06m11.7216s",
"utctime": "2020-07-07 16:40:03"
},
"scheduler": None
},
"state": "scheduling",
"system": {
"free_space": 145.688
}
}
],
[
"PANCHAT",
{
"message": "Ok, I'm finding something good to look at...",
"timestamp": "2020-07-07 16:40:04"
}
],
[
"PANCHAT",
{
"message": "No valid observations found. Cannot schedule. Going to park.",
"timestamp": "2020-07-07 16:40:04"
}
],
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 55.118,
"current_ha": 1.021,
"current_ra": 15.314,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": True
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": True,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": True,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.589,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:05",
"localtime": "2020-07-07 18:40:04.526112+02:00",
"siderealtime": "12h06m12.6691s",
"utctime": "2020-07-07 16:40:04"
},
"scheduler": None
},
"state": "parking",
"system": {
"free_space": 145.688
}
}
],
[
"PANCHAT",
{
"message": "Taking it on home and then parking.",
"timestamp": "2020-07-07 16:40:05"
}
],
[
"WEATHER",
{
"data": {
"WEATHER_FORECAST": 0.0,
"WEATHER_RAIN_HOUR": 0.0,
"WEATHER_TEMPERATURE": 15.0,
"WEATHER_WIND_GUST": 0.0,
"WEATHER_WIND_SPEED": 10.0,
"date": "2020-07-07T16:40:30.742775+00:00",
"safe": True,
"state": "OK",
"weather_sensor_name": "Weather Simulator"
}
}
],
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 0.0,
"current_ha": 24.0,
"current_ra": 360.0,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": False
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": False,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": False,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.511,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:05",
"localtime": "2020-07-07 18:40:33.414082+02:00",
"siderealtime": "12h06m41.67s",
"utctime": "2020-07-07 16:40:33"
},
"scheduler": None
},
"state": "parked",
"system": {
"free_space": 145.688
}
}
],
[
"PANCHAT",
{
"message": "No observations found.",
"timestamp": "2020-07-07 16:40:34"
}
],
[
"PANCHAT",
{
"message": "Going to stay parked for half an hour then will try again.",
"timestamp": "2020-07-07 16:40:34"
}
],
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 0.0,
"current_ha": 24.0,
"current_ra": 360.0,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": False
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": False,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": False,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.509,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:05",
"localtime": "2020-07-07 18:40:34.260980+02:00",
"siderealtime": "12h06m42.4857s",
"utctime": "2020-07-07 16:40:34"
},
"scheduler": None
},
"state": "parked",
"system": {
"free_space": 145.688
}
}
],
[
"WEATHER",
{
"data": {
"WEATHER_FORECAST": 0.0,
"WEATHER_RAIN_HOUR": 0.0,
"WEATHER_TEMPERATURE": 15.0,
"WEATHER_WIND_GUST": 0.0,
"WEATHER_WIND_SPEED": 10.0,
"date": "2020-07-07T16:41:30.816454+00:00",
"safe": True,
"state": "OK",
"weather_sensor_name": "Weather Simulator"
}
}
]]
while True:
#msg = msg_subscriber.receive_message()
#print(json.dumps(msg, indent=4, sort_keys=True))
channel, msg = random.choice(sample_msgs)
msg_publisher.send_message(channel, msg)
time.sleep(4)
#launch with PYTHONPATH=. python3 ../PAWS/launch_PanMsg_generator.py | launch_PanMsg_generator.py | import json
import random
import multiprocessing
import time
#Local utils
from utils.messaging import PanMessaging
#msg_subscriber = PanMessaging.create_subscriber(6511)
def create_forwarder(port):
try:
PanMessaging.create_forwarder(port, port + 1)
except Exception:
pass
msg_forwarder_process = multiprocessing.Process(
target=create_forwarder, args=(
6510,), name='MsgForwarder')
msg_forwarder_process.start()
msg_publisher = PanMessaging.create_publisher(6510)
sample_msgs = [
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 55.118,
"current_ha": 1.021,
"current_ra": 15.314,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": True
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": True,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": True,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.592,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:06",
"localtime": "2020-07-07 18:40:03.519329+02:00",
"siderealtime": "12h06m11.7216s",
"utctime": "2020-07-07 16:40:03"
},
"scheduler": None
},
"state": "scheduling",
"system": {
"free_space": 145.688
}
}
],
[
"PANCHAT",
{
"message": "Ok, I'm finding something good to look at...",
"timestamp": "2020-07-07 16:40:04"
}
],
[
"PANCHAT",
{
"message": "No valid observations found. Cannot schedule. Going to park.",
"timestamp": "2020-07-07 16:40:04"
}
],
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 55.118,
"current_ha": 1.021,
"current_ra": 15.314,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": True
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": True,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": True,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.589,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:05",
"localtime": "2020-07-07 18:40:04.526112+02:00",
"siderealtime": "12h06m12.6691s",
"utctime": "2020-07-07 16:40:04"
},
"scheduler": None
},
"state": "parking",
"system": {
"free_space": 145.688
}
}
],
[
"PANCHAT",
{
"message": "Taking it on home and then parking.",
"timestamp": "2020-07-07 16:40:05"
}
],
[
"WEATHER",
{
"data": {
"WEATHER_FORECAST": 0.0,
"WEATHER_RAIN_HOUR": 0.0,
"WEATHER_TEMPERATURE": 15.0,
"WEATHER_WIND_GUST": 0.0,
"WEATHER_WIND_SPEED": 10.0,
"date": "2020-07-07T16:40:30.742775+00:00",
"safe": True,
"state": "OK",
"weather_sensor_name": "Weather Simulator"
}
}
],
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 0.0,
"current_ha": 24.0,
"current_ra": 360.0,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": False
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": False,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": False,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.511,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:05",
"localtime": "2020-07-07 18:40:33.414082+02:00",
"siderealtime": "12h06m41.67s",
"utctime": "2020-07-07 16:40:33"
},
"scheduler": None
},
"state": "parked",
"system": {
"free_space": 145.688
}
}
],
[
"PANCHAT",
{
"message": "No observations found.",
"timestamp": "2020-07-07 16:40:34"
}
],
[
"PANCHAT",
{
"message": "Going to stay parked for half an hour then will try again.",
"timestamp": "2020-07-07 16:40:34"
}
],
[
"STATUS",
{
"observatory": {
"mount": {
"current_dec": 0.0,
"current_ha": 24.0,
"current_ra": 360.0,
"guide_rate_ns": 0.5,
"guide_rate_we": 0.5,
"slew_rate": "3x",
"track_mode": "TRACK_SIDEREAL"
},
"observatory": {
"altitude": 150.0,
"dome": {
"is_open": False
},
"location": {
"latitude": 43.56,
"longitude": 5.43
},
"owner": "gnthibault",
"scope": {
"camera_relay": False,
"corrector_dew": False,
"finder_dew": False,
"finder_dustcap_open": False,
"flat_panel": False,
"mount_relay": False,
"scope_dew": False,
"scope_dustcap_open": False,
"scope_fan": False
},
"timezone": "Europe/Paris"
},
"observer": {
"local_evening_astro_time": "21:39:50",
"local_moon_alt": -48.509,
"local_moon_illumination": 0.93,
"local_moon_phase": 0.535,
"local_morning_astro_time": "01:47:02",
"local_sun_rise_time": "04:09:51",
"local_sun_set_time": "19:17:05",
"localtime": "2020-07-07 18:40:34.260980+02:00",
"siderealtime": "12h06m42.4857s",
"utctime": "2020-07-07 16:40:34"
},
"scheduler": None
},
"state": "parked",
"system": {
"free_space": 145.688
}
}
],
[
"WEATHER",
{
"data": {
"WEATHER_FORECAST": 0.0,
"WEATHER_RAIN_HOUR": 0.0,
"WEATHER_TEMPERATURE": 15.0,
"WEATHER_WIND_GUST": 0.0,
"WEATHER_WIND_SPEED": 10.0,
"date": "2020-07-07T16:41:30.816454+00:00",
"safe": True,
"state": "OK",
"weather_sensor_name": "Weather Simulator"
}
}
]]
while True:
#msg = msg_subscriber.receive_message()
#print(json.dumps(msg, indent=4, sort_keys=True))
channel, msg = random.choice(sample_msgs)
msg_publisher.send_message(channel, msg)
time.sleep(4)
#launch with PYTHONPATH=. python3 ../PAWS/launch_PanMsg_generator.py | 0.31542 | 0.255493 |
import unittest
import larry as lry
ENVIRONMENT_PROD = 'production'
ENVIRONMENT_SANDBOX = 'sandbox'
SANDBOX_HIT = '39HYCOOPKNK26VOMWWPV050D1O9MD5'
SANDBOX_HIT_TYPE = '3W679PTMVMW4B1YPP05F1CL2SYKBXP'
SANDBOX_ASSIGNMENT = '3TEM0PF1Q5W8Q0F8XU7ZRSPG1ARD0O'
PROD_HIT = '30Y6N4AHYOVT3B1E15NSX07Z8YNRDS'
PROD_HIT_TYPE = '32CVJ4DS80UD0FXOVYK5MQJIWDSKV8'
PROD_ASSIGNMENT = '3N4BPTXIO8RWKSXYNI9LV8K4SNYUK5'
SIMPLE_QUESTION = '<script src="https://assets.crowd.aws/crowd-html-elements.js"></script><crowd-form><p>What is the date today?</p><input name="date"></crowd-form>'
SIMPLE_TEMPLATE = '<script src="https://assets.crowd.aws/crowd-html-elements.js"></script><crowd-form><p>What day of the week was {{ date }}?</p><input name="date"></crowd-form>'
SIMPLE_TEMPLATE_URI = 's3://larry-testing/test-objects/mturk/simple_template.html'
BASIC_ANNOTATION_DICT = {'path': 'detail'}
BASIC_ANNOTATION_STRING = 'For easier data science use Larry'
EXTERNAL_URL = 'https://www.google.com'
class MTurkTests(unittest.TestCase):
def test_use_production(self):
lry.mturk.use_production()
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_use_sandbox(self):
lry.mturk.use_sandbox()
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_set_environment_prod(self):
lry.mturk.set_environment('prod')
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_set_environment_sandbox(self):
lry.mturk.set_environment('sandbox')
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_set_environment_prod_hit(self):
lry.mturk.set_environment(hit_id=PROD_HIT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_set_environment_sandbox_hit(self):
lry.mturk.set_environment(hit_id=SANDBOX_HIT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_set_environment_prod_assignment(self):
lry.mturk.set_environment(assignment_id=PROD_ASSIGNMENT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_set_environment_sandbox_assignment(self):
lry.mturk.set_environment(assignment_id=SANDBOX_ASSIGNMENT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_create_hit(self):
lry.mturk.use_sandbox()
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward_cents=10, lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
html_question=SIMPLE_QUESTION, annotation=BASIC_ANNOTATION_DICT)
self.assertFalse(hit.production)
hit = lry.mturk.get_hit(hit.hit_id)
self.assertEqual(hit.annotation, BASIC_ANNOTATION_DICT)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward_cents=10, lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
html_question=SIMPLE_QUESTION, annotation=BASIC_ANNOTATION_STRING)
self.assertFalse(hit.production)
hit = lry.mturk.get_hit(hit.hit_id)
self.assertEqual(hit.annotation, BASIC_ANNOTATION_STRING)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question=lry.mturk.render_html_question(SIMPLE_QUESTION))
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question=lry.mturk.render_external_question(EXTERNAL_URL))
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
external_question=EXTERNAL_URL)
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question_template=SIMPLE_TEMPLATE, template_context={'date': '2/13/2020'})
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question_template_uri=SIMPLE_TEMPLATE_URI, template_context={'date': '2/13/2020'})
self.assertFalse(hit.production)
def test_create_by_hit_type(self):
lry.mturk.use_sandbox()
hit_type_id = lry.mturk.create_hit_type(title="Simple task", description="Answer a simple question",
reward="0.10", assignment_duration=60)
hit_type_id = lry.mturk.create_hit_type(title="Simple task", description="Answer a simple question",
reward_cents=10, assignment_duration=60, auto_approval_delay=60,
keywords='foo,bar')
hit = lry.mturk.create_hit(hit_type_id=hit_type_id, lifetime=60, max_assignments=1,
html_question=SIMPLE_QUESTION)
self.assertFalse(hit.production)
if __name__ == '__main__':
unittest.main() | test/mturk.py | import unittest
import larry as lry
ENVIRONMENT_PROD = 'production'
ENVIRONMENT_SANDBOX = 'sandbox'
SANDBOX_HIT = '39HYCOOPKNK26VOMWWPV050D1O9MD5'
SANDBOX_HIT_TYPE = '3W679PTMVMW4B1YPP05F1CL2SYKBXP'
SANDBOX_ASSIGNMENT = '3TEM0PF1Q5W8Q0F8XU7ZRSPG1ARD0O'
PROD_HIT = '30Y6N4AHYOVT3B1E15NSX07Z8YNRDS'
PROD_HIT_TYPE = '32CVJ4DS80UD0FXOVYK5MQJIWDSKV8'
PROD_ASSIGNMENT = '3N4BPTXIO8RWKSXYNI9LV8K4SNYUK5'
SIMPLE_QUESTION = '<script src="https://assets.crowd.aws/crowd-html-elements.js"></script><crowd-form><p>What is the date today?</p><input name="date"></crowd-form>'
SIMPLE_TEMPLATE = '<script src="https://assets.crowd.aws/crowd-html-elements.js"></script><crowd-form><p>What day of the week was {{ date }}?</p><input name="date"></crowd-form>'
SIMPLE_TEMPLATE_URI = 's3://larry-testing/test-objects/mturk/simple_template.html'
BASIC_ANNOTATION_DICT = {'path': 'detail'}
BASIC_ANNOTATION_STRING = 'For easier data science use Larry'
EXTERNAL_URL = 'https://www.google.com'
class MTurkTests(unittest.TestCase):
def test_use_production(self):
lry.mturk.use_production()
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_use_sandbox(self):
lry.mturk.use_sandbox()
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_set_environment_prod(self):
lry.mturk.set_environment('prod')
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_set_environment_sandbox(self):
lry.mturk.set_environment('sandbox')
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_set_environment_prod_hit(self):
lry.mturk.set_environment(hit_id=PROD_HIT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_set_environment_sandbox_hit(self):
lry.mturk.set_environment(hit_id=SANDBOX_HIT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_set_environment_prod_assignment(self):
lry.mturk.set_environment(assignment_id=PROD_ASSIGNMENT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_PROD)
self.assertTrue(lry.mturk.production())
self.assertFalse(lry.mturk.sandbox())
def test_set_environment_sandbox_assignment(self):
lry.mturk.set_environment(assignment_id=SANDBOX_ASSIGNMENT)
self.assertEqual(lry.mturk.environment(), ENVIRONMENT_SANDBOX)
self.assertTrue(lry.mturk.sandbox())
self.assertFalse(lry.mturk.production())
def test_create_hit(self):
lry.mturk.use_sandbox()
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward_cents=10, lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
html_question=SIMPLE_QUESTION, annotation=BASIC_ANNOTATION_DICT)
self.assertFalse(hit.production)
hit = lry.mturk.get_hit(hit.hit_id)
self.assertEqual(hit.annotation, BASIC_ANNOTATION_DICT)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward_cents=10, lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
html_question=SIMPLE_QUESTION, annotation=BASIC_ANNOTATION_STRING)
self.assertFalse(hit.production)
hit = lry.mturk.get_hit(hit.hit_id)
self.assertEqual(hit.annotation, BASIC_ANNOTATION_STRING)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question=lry.mturk.render_html_question(SIMPLE_QUESTION))
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question=lry.mturk.render_external_question(EXTERNAL_URL))
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
external_question=EXTERNAL_URL)
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question_template=SIMPLE_TEMPLATE, template_context={'date': '2/13/2020'})
self.assertFalse(hit.production)
hit = lry.mturk.create_hit("Simple task", "Answer a simple question", reward='0.10', lifetime=60,
assignment_duration=60, max_assignments=1, auto_approval_delay=600,
question_template_uri=SIMPLE_TEMPLATE_URI, template_context={'date': '2/13/2020'})
self.assertFalse(hit.production)
def test_create_by_hit_type(self):
lry.mturk.use_sandbox()
hit_type_id = lry.mturk.create_hit_type(title="Simple task", description="Answer a simple question",
reward="0.10", assignment_duration=60)
hit_type_id = lry.mturk.create_hit_type(title="Simple task", description="Answer a simple question",
reward_cents=10, assignment_duration=60, auto_approval_delay=60,
keywords='foo,bar')
hit = lry.mturk.create_hit(hit_type_id=hit_type_id, lifetime=60, max_assignments=1,
html_question=SIMPLE_QUESTION)
self.assertFalse(hit.production)
if __name__ == '__main__':
unittest.main() | 0.500488 | 0.298849 |
from typing import Optional
from rap.client.core import BaseClient
from rap.client.endpoint import BalanceEnum
from rap.client.endpoint.consul import ConsulEndpoint
class Client(BaseClient):
def __init__(
self,
server_name: str,
keep_alive_timeout: int = 1200,
ssl_crt_path: Optional[str] = None,
cache_interval: Optional[float] = None,
ws_min_interval: Optional[int] = None,
ws_max_interval: Optional[int] = None,
ws_statistics_interval: Optional[int] = None,
select_conn_method: BalanceEnum = BalanceEnum.random,
min_ping_interval: Optional[int] = None,
max_ping_interval: Optional[int] = None,
ping_fail_cnt: Optional[int] = None,
through_deadline: bool = False,
max_pool_size: Optional[int] = None,
min_poll_size: Optional[int] = None,
# consul client param
consul_namespace: str = "rap",
consul_ttl: int = 10,
consul_host: str = "127.0.0.1",
consul_port: int = 8500,
consul_token: Optional[str] = None,
consul_scheme: str = "http",
consul_consistency: str = "default",
consul_dc: Optional[str] = None,
consul_verify: bool = True,
consul_cert: Optional[str] = None,
):
super().__init__(
server_name,
cache_interval=cache_interval,
ws_min_interval=ws_min_interval,
ws_max_interval=ws_max_interval,
ws_statistics_interval=ws_statistics_interval,
through_deadline=through_deadline,
)
self.endpoint = ConsulEndpoint(
self,
ssl_crt_path=ssl_crt_path,
read_timeout=keep_alive_timeout,
balance_enum=select_conn_method,
ping_fail_cnt=ping_fail_cnt,
min_ping_interval=min_ping_interval,
max_ping_interval=max_ping_interval,
max_pool_size=max_pool_size,
min_poll_size=min_poll_size,
consul_namespace=consul_namespace,
consul_ttl=consul_ttl,
consul_host=consul_host,
consul_port=consul_port,
consul_token=consul_token,
consul_scheme=consul_scheme,
consul_consistency=consul_consistency,
consul_dc=consul_dc,
consul_verify=consul_verify,
consul_cert=consul_cert,
) | rap/client/extend_client/consul.py | from typing import Optional
from rap.client.core import BaseClient
from rap.client.endpoint import BalanceEnum
from rap.client.endpoint.consul import ConsulEndpoint
class Client(BaseClient):
def __init__(
self,
server_name: str,
keep_alive_timeout: int = 1200,
ssl_crt_path: Optional[str] = None,
cache_interval: Optional[float] = None,
ws_min_interval: Optional[int] = None,
ws_max_interval: Optional[int] = None,
ws_statistics_interval: Optional[int] = None,
select_conn_method: BalanceEnum = BalanceEnum.random,
min_ping_interval: Optional[int] = None,
max_ping_interval: Optional[int] = None,
ping_fail_cnt: Optional[int] = None,
through_deadline: bool = False,
max_pool_size: Optional[int] = None,
min_poll_size: Optional[int] = None,
# consul client param
consul_namespace: str = "rap",
consul_ttl: int = 10,
consul_host: str = "127.0.0.1",
consul_port: int = 8500,
consul_token: Optional[str] = None,
consul_scheme: str = "http",
consul_consistency: str = "default",
consul_dc: Optional[str] = None,
consul_verify: bool = True,
consul_cert: Optional[str] = None,
):
super().__init__(
server_name,
cache_interval=cache_interval,
ws_min_interval=ws_min_interval,
ws_max_interval=ws_max_interval,
ws_statistics_interval=ws_statistics_interval,
through_deadline=through_deadline,
)
self.endpoint = ConsulEndpoint(
self,
ssl_crt_path=ssl_crt_path,
read_timeout=keep_alive_timeout,
balance_enum=select_conn_method,
ping_fail_cnt=ping_fail_cnt,
min_ping_interval=min_ping_interval,
max_ping_interval=max_ping_interval,
max_pool_size=max_pool_size,
min_poll_size=min_poll_size,
consul_namespace=consul_namespace,
consul_ttl=consul_ttl,
consul_host=consul_host,
consul_port=consul_port,
consul_token=consul_token,
consul_scheme=consul_scheme,
consul_consistency=consul_consistency,
consul_dc=consul_dc,
consul_verify=consul_verify,
consul_cert=consul_cert,
) | 0.862207 | 0.087058 |
from __future__ import absolute_import, division, unicode_literals
from flask.ext.restful import reqparse, types
from sqlalchemy.orm import contains_eager
from sqlalchemy.sql import func, asc, desc
from changes.api.base import APIView
from changes.api.serializer.models.bazeltarget import BazelTargetWithMessagesCrumbler
from changes.constants import Result
from changes.models.bazeltarget import BazelTarget
from changes.models.build import Build
from changes.models.job import Job
SORT_CHOICES = (
'duration',
'name'
)
RESULT_CHOICES = [r.name for r in Result] + ['']
class BuildTargetIndexAPIView(APIView):
parser = reqparse.RequestParser()
parser.add_argument('query', type=unicode, location='args')
parser.add_argument('result', type=unicode, location='args',
choices=RESULT_CHOICES)
parser.add_argument('sort', type=unicode, location='args',
choices=SORT_CHOICES, default='duration')
parser.add_argument('reverse', type=types.boolean, location='args',
default=False)
parser.add_argument('max_messages_per_target', type=int, location='args', default=5)
def get(self, build_id):
build = Build.query.get(build_id)
if build is None:
return self.respond({}, status_code=404)
args = self.parser.parse_args()
target_list = BazelTarget.query.options(
contains_eager('job')
).join(
Job, BazelTarget.job_id == Job.id,
).filter(
Job.build_id == build.id,
)
if args.query:
target_list = target_list.filter(
func.lower(BazelTarget.name).contains(args.query.lower()),
)
if args.result:
target_list = target_list.filter(
BazelTarget.result == Result[args.result],
)
sort_col, sort_dir = None, None
if args.sort == 'duration':
sort_col, sort_dir = BazelTarget.duration, desc
elif args.sort == 'name':
sort_col, sort_dir = BazelTarget.name, asc
if args.reverse:
sort_dir = {asc: desc, desc: asc}[sort_dir]
target_list = target_list.order_by(sort_dir(sort_col))
return self.paginate(target_list, max_per_page=None, serializers={BazelTarget: BazelTargetWithMessagesCrumbler(max_messages=args.max_messages_per_target)}) | changes/api/build_target_index.py | from __future__ import absolute_import, division, unicode_literals
from flask.ext.restful import reqparse, types
from sqlalchemy.orm import contains_eager
from sqlalchemy.sql import func, asc, desc
from changes.api.base import APIView
from changes.api.serializer.models.bazeltarget import BazelTargetWithMessagesCrumbler
from changes.constants import Result
from changes.models.bazeltarget import BazelTarget
from changes.models.build import Build
from changes.models.job import Job
# Valid ?sort= values.
SORT_CHOICES = (
    'duration',
    'name'
)
# All Result enum names; '' disables result filtering.
RESULT_CHOICES = [r.name for r in Result] + ['']
class BuildTargetIndexAPIView(APIView):
    """API endpoint listing the Bazel targets of one build, with optional
    name/result filtering, duration/name sorting, and pagination."""
    parser = reqparse.RequestParser()
    # Substring filter on target name (compared case-insensitively below).
    parser.add_argument('query', type=unicode, location='args')
    # Restricted to RESULT_CHOICES by reqparse.
    parser.add_argument('result', type=unicode, location='args',
                        choices=RESULT_CHOICES)
    # Restricted to SORT_CHOICES; defaults to duration.
    parser.add_argument('sort', type=unicode, location='args',
                        choices=SORT_CHOICES, default='duration')
    parser.add_argument('reverse', type=types.boolean, location='args',
                        default=False)
    parser.add_argument('max_messages_per_target', type=int, location='args', default=5)

    def get(self, build_id):
        """Respond with the build's targets; 404 when the build is unknown."""
        build = Build.query.get(build_id)
        if build is None:
            return self.respond({}, status_code=404)
        args = self.parser.parse_args()
        # Targets attach to jobs, not builds, hence the Job join;
        # contains_eager pre-populates target.job from the same query.
        target_list = BazelTarget.query.options(
            contains_eager('job')
        ).join(
            Job, BazelTarget.job_id == Job.id,
        ).filter(
            Job.build_id == build.id,
        )
        if args.query:
            target_list = target_list.filter(
                func.lower(BazelTarget.name).contains(args.query.lower()),
            )
        if args.result:
            target_list = target_list.filter(
                BazelTarget.result == Result[args.result],
            )
        # duration sorts descending by default, name ascending.
        sort_col, sort_dir = None, None
        if args.sort == 'duration':
            sort_col, sort_dir = BazelTarget.duration, desc
        elif args.sort == 'name':
            sort_col, sort_dir = BazelTarget.name, asc
        if args.reverse:
            # Invert whichever direction was chosen above.
            sort_dir = {asc: desc, desc: asc}[sort_dir]
        target_list = target_list.order_by(sort_dir(sort_col))
        return self.paginate(target_list, max_per_page=None, serializers={BazelTarget: BazelTargetWithMessagesCrumbler(max_messages=args.max_messages_per_target)}) | 0.611382 | 0.094929 |
# ---------------------------------------------------------------------------
# The 'azure.keyvault.generated' namespace has been preserved in this version
# of the SDK for backwards compatibility through the preview, however it may
# be removed in subsequent versions of the SDK.
# ---------------------------------------------------------------------------
from .. import KeyVaultClient
from .. import VERSION
from ..models import Attributes as __models_Attributes
from ..models import JsonWebKey as __models_JsonWebKey
from ..models import KeyAttributes as __models_KeyAttributes
from ..models import KeyBundle as __models_KeyBundle
from ..models import KeyItem as __models_KeyItem
from ..models import SecretAttributes as __models_SecretAttributes
from ..models import SecretBundle as __models_SecretBundle
from ..models import SecretItem as __models_SecretItem
from ..models import CertificateAttributes as __models_CertificateAttributes
from ..models import CertificateItem as __models_CertificateItem
from ..models import CertificateIssuerItem as __models_CertificateIssuerItem
from ..models import KeyProperties as __models_KeyProperties
from ..models import SecretProperties as __models_SecretProperties
from ..models import SubjectAlternativeNames as __models_SubjectAlternativeNames
from ..models import X509CertificateProperties as __models_X509CertificateProperties
from ..models import Trigger as __models_Trigger
from ..models import Action as __models_Action
from ..models import LifetimeAction as __models_LifetimeAction
from ..models import IssuerParameters as __models_IssuerParameters
from ..models import CertificatePolicy as __models_CertificatePolicy
from ..models import CertificateBundle as __models_CertificateBundle
from ..models import Error as __models_Error
from ..models import CertificateOperation as __models_CertificateOperation
from ..models import IssuerCredentials as __models_IssuerCredentials
from ..models import AdministratorDetails as __models_AdministratorDetails
from ..models import OrganizationDetails as __models_OrganizationDetails
from ..models import IssuerAttributes as __models_IssuerAttributes
from ..models import IssuerBundle as __models_IssuerBundle
from ..models import Contact as __models_Contact
from ..models import Contacts as __models_Contacts
from ..models import KeyCreateParameters as __models_KeyCreateParameters
from ..models import KeyImportParameters as __models_KeyImportParameters
from ..models import KeyOperationsParameters as __models_KeyOperationsParameters
from ..models import KeySignParameters as __models_KeySignParameters
from ..models import KeyVerifyParameters as __models_KeyVerifyParameters
from ..models import KeyUpdateParameters as __models_KeyUpdateParameters
from ..models import KeyRestoreParameters as __models_KeyRestoreParameters
from ..models import SecretSetParameters as __models_SecretSetParameters
from ..models import SecretUpdateParameters as __models_SecretUpdateParameters
from ..models import CertificateCreateParameters as __models_CertificateCreateParameters
from ..models import CertificateImportParameters as __models_CertificateImportParameters
from ..models import CertificateUpdateParameters as __models_CertificateUpdateParameters
from ..models import CertificateMergeParameters as __models_CertificateMergeParameters
from ..models import CertificateIssuerSetParameters as __models_CertificateIssuerSetParameters
from ..models import CertificateIssuerUpdateParameters as __models_CertificateIssuerUpdateParameters
from ..models import CertificateOperationUpdateParameter as __models_CertificateOperationUpdateParameter
from ..models import KeyOperationResult as __models_KeyOperationResult
from ..models import KeyVerifyResult as __models_KeyVerifyResult
from ..models import BackupKeyResult as __models_BackupKeyResult
from ..models import PendingCertificateSigningRequestResult as __models_PendingCertificateSigningRequestResult
from ..models import KeyVaultError as __models_KeyVaultError
from ..models import KeyVaultErrorException as __models_KeyVaultErrorException
from ..models import KeyItemPaged as __models_KeyItemPaged
from ..models import SecretItemPaged as __models_SecretItemPaged
from ..models import CertificateItemPaged as __models_CertificateItemPaged
from ..models import CertificateIssuerItemPaged as __models_CertificateIssuerItemPaged
from ..models import JsonWebKeyType as __models_JsonWebKeyType
from ..models import KeyUsageType as __models_KeyUsageType
from ..models import ActionType as __models_ActionType
from ..models import JsonWebKeyOperation as __models_JsonWebKeyOperation
from ..models import JsonWebKeyEncryptionAlgorithm as __models_JsonWebKeyEncryptionAlgorithm
from ..models import JsonWebKeySignatureAlgorithm as __models_JsonWebKeySignatureAlgorithm
import warnings
warnings.warn("The namespace azure.keyvault.generated has been deprecated and it's contents moved to azure.keyvault", DeprecationWarning)
__all__ = ['KeyVaultClient',
'__models_Attributes',
'__models_JsonWebKey',
'__models_KeyAttributes',
'__models_KeyBundle',
'__models_KeyItem',
'__models_SecretAttributes',
'__models_SecretBundle',
'__models_SecretItem',
'__models_CertificateAttributes',
'__models_CertificateItem',
'__models_CertificateIssuerItem',
'__models_KeyProperties',
'__models_SecretProperties',
'__models_SubjectAlternativeNames',
'__models_X509CertificateProperties',
'__models_Trigger',
'__models_Action',
'__models_LifetimeAction',
'__models_IssuerParameters',
'__models_CertificatePolicy',
'__models_CertificateBundle',
'__models_Error',
'__models_CertificateOperation',
'__models_IssuerCredentials',
'__models_AdministratorDetails',
'__models_OrganizationDetails',
'__models_IssuerAttributes',
'__models_IssuerBundle',
'__models_Contact',
'__models_Contacts',
'__models_KeyCreateParameters',
'__models_KeyImportParameters',
'__models_KeyOperationsParameters',
'__models_KeySignParameters',
'__models_KeyVerifyParameters',
'__models_KeyUpdateParameters',
'__models_KeyRestoreParameters',
'__models_SecretSetParameters',
'__models_SecretUpdateParameters',
'__models_CertificateCreateParameters',
'__models_CertificateImportParameters',
'__models_CertificateUpdateParameters',
'__models_CertificateMergeParameters',
'__models_CertificateIssuerSetParameters',
'__models_CertificateIssuerUpdateParameters',
'__models_CertificateOperationUpdateParameter',
'__models_KeyOperationResult',
'__models_KeyVerifyResult',
'__models_BackupKeyResult',
'__models_PendingCertificateSigningRequestResult',
'__models_KeyVaultError',
'__models_KeyVaultErrorException',
'__models_KeyItemPaged',
'__models_SecretItemPaged',
'__models_CertificateItemPaged',
'__models_CertificateIssuerItemPaged',
'__models_JsonWebKeyType',
'__models_KeyUsageType',
'__models_ActionType',
'__models_JsonWebKeyOperation',
'__models_JsonWebKeyEncryptionAlgorithm',
'__models_JsonWebKeySignatureAlgorithm',]
__version__ = VERSION | azure-keyvault/azure/keyvault/generated/__init__.py |
# ---------------------------------------------------------------------------
# The 'azure.keyvault.generated' namespace has been preserved in this version
# of the SDK for backwards compatibility through the preview, however it may
# be removed in subsequent versions of the SDK.
# ---------------------------------------------------------------------------
from .. import KeyVaultClient
from .. import VERSION
from ..models import Attributes as __models_Attributes
from ..models import JsonWebKey as __models_JsonWebKey
from ..models import KeyAttributes as __models_KeyAttributes
from ..models import KeyBundle as __models_KeyBundle
from ..models import KeyItem as __models_KeyItem
from ..models import SecretAttributes as __models_SecretAttributes
from ..models import SecretBundle as __models_SecretBundle
from ..models import SecretItem as __models_SecretItem
from ..models import CertificateAttributes as __models_CertificateAttributes
from ..models import CertificateItem as __models_CertificateItem
from ..models import CertificateIssuerItem as __models_CertificateIssuerItem
from ..models import KeyProperties as __models_KeyProperties
from ..models import SecretProperties as __models_SecretProperties
from ..models import SubjectAlternativeNames as __models_SubjectAlternativeNames
from ..models import X509CertificateProperties as __models_X509CertificateProperties
from ..models import Trigger as __models_Trigger
from ..models import Action as __models_Action
from ..models import LifetimeAction as __models_LifetimeAction
from ..models import IssuerParameters as __models_IssuerParameters
from ..models import CertificatePolicy as __models_CertificatePolicy
from ..models import CertificateBundle as __models_CertificateBundle
from ..models import Error as __models_Error
from ..models import CertificateOperation as __models_CertificateOperation
from ..models import IssuerCredentials as __models_IssuerCredentials
from ..models import AdministratorDetails as __models_AdministratorDetails
from ..models import OrganizationDetails as __models_OrganizationDetails
from ..models import IssuerAttributes as __models_IssuerAttributes
from ..models import IssuerBundle as __models_IssuerBundle
from ..models import Contact as __models_Contact
from ..models import Contacts as __models_Contacts
from ..models import KeyCreateParameters as __models_KeyCreateParameters
from ..models import KeyImportParameters as __models_KeyImportParameters
from ..models import KeyOperationsParameters as __models_KeyOperationsParameters
from ..models import KeySignParameters as __models_KeySignParameters
from ..models import KeyVerifyParameters as __models_KeyVerifyParameters
from ..models import KeyUpdateParameters as __models_KeyUpdateParameters
from ..models import KeyRestoreParameters as __models_KeyRestoreParameters
from ..models import SecretSetParameters as __models_SecretSetParameters
from ..models import SecretUpdateParameters as __models_SecretUpdateParameters
from ..models import CertificateCreateParameters as __models_CertificateCreateParameters
from ..models import CertificateImportParameters as __models_CertificateImportParameters
from ..models import CertificateUpdateParameters as __models_CertificateUpdateParameters
from ..models import CertificateMergeParameters as __models_CertificateMergeParameters
from ..models import CertificateIssuerSetParameters as __models_CertificateIssuerSetParameters
from ..models import CertificateIssuerUpdateParameters as __models_CertificateIssuerUpdateParameters
from ..models import CertificateOperationUpdateParameter as __models_CertificateOperationUpdateParameter
from ..models import KeyOperationResult as __models_KeyOperationResult
from ..models import KeyVerifyResult as __models_KeyVerifyResult
from ..models import BackupKeyResult as __models_BackupKeyResult
from ..models import PendingCertificateSigningRequestResult as __models_PendingCertificateSigningRequestResult
from ..models import KeyVaultError as __models_KeyVaultError
from ..models import KeyVaultErrorException as __models_KeyVaultErrorException
from ..models import KeyItemPaged as __models_KeyItemPaged
from ..models import SecretItemPaged as __models_SecretItemPaged
from ..models import CertificateItemPaged as __models_CertificateItemPaged
from ..models import CertificateIssuerItemPaged as __models_CertificateIssuerItemPaged
from ..models import JsonWebKeyType as __models_JsonWebKeyType
from ..models import KeyUsageType as __models_KeyUsageType
from ..models import ActionType as __models_ActionType
from ..models import JsonWebKeyOperation as __models_JsonWebKeyOperation
from ..models import JsonWebKeyEncryptionAlgorithm as __models_JsonWebKeyEncryptionAlgorithm
from ..models import JsonWebKeySignatureAlgorithm as __models_JsonWebKeySignatureAlgorithm
import warnings
warnings.warn("The namespace azure.keyvault.generated has been deprecated and it's contents moved to azure.keyvault", DeprecationWarning)
__all__ = ['KeyVaultClient',
'__models_Attributes',
'__models_JsonWebKey',
'__models_KeyAttributes',
'__models_KeyBundle',
'__models_KeyItem',
'__models_SecretAttributes',
'__models_SecretBundle',
'__models_SecretItem',
'__models_CertificateAttributes',
'__models_CertificateItem',
'__models_CertificateIssuerItem',
'__models_KeyProperties',
'__models_SecretProperties',
'__models_SubjectAlternativeNames',
'__models_X509CertificateProperties',
'__models_Trigger',
'__models_Action',
'__models_LifetimeAction',
'__models_IssuerParameters',
'__models_CertificatePolicy',
'__models_CertificateBundle',
'__models_Error',
'__models_CertificateOperation',
'__models_IssuerCredentials',
'__models_AdministratorDetails',
'__models_OrganizationDetails',
'__models_IssuerAttributes',
'__models_IssuerBundle',
'__models_Contact',
'__models_Contacts',
'__models_KeyCreateParameters',
'__models_KeyImportParameters',
'__models_KeyOperationsParameters',
'__models_KeySignParameters',
'__models_KeyVerifyParameters',
'__models_KeyUpdateParameters',
'__models_KeyRestoreParameters',
'__models_SecretSetParameters',
'__models_SecretUpdateParameters',
'__models_CertificateCreateParameters',
'__models_CertificateImportParameters',
'__models_CertificateUpdateParameters',
'__models_CertificateMergeParameters',
'__models_CertificateIssuerSetParameters',
'__models_CertificateIssuerUpdateParameters',
'__models_CertificateOperationUpdateParameter',
'__models_KeyOperationResult',
'__models_KeyVerifyResult',
'__models_BackupKeyResult',
'__models_PendingCertificateSigningRequestResult',
'__models_KeyVaultError',
'__models_KeyVaultErrorException',
'__models_KeyItemPaged',
'__models_SecretItemPaged',
'__models_CertificateItemPaged',
'__models_CertificateIssuerItemPaged',
'__models_JsonWebKeyType',
'__models_KeyUsageType',
'__models_ActionType',
'__models_JsonWebKeyOperation',
'__models_JsonWebKeyEncryptionAlgorithm',
'__models_JsonWebKeySignatureAlgorithm',]
__version__ = VERSION | 0.580471 | 0.040276 |
from gitexd.interfaces import IAuth
from gitexd.tests import ApplicationTest, formatRemote, AuthenticationTest
from gitexd.tests.plugins import keyAuth, passAuth
__author__ = 'christophe'
class KeyAuthenticationTests(AuthenticationTest):
    """Exercise key-based authentication against the test application."""

    def setUp(self):
        ApplicationTest.setUp(self)
        self.startApplication(pluginPackages={
            IAuth: keyAuth
        })

    def testAnonymous(self):
        """An anonymous push must be denied."""
        remote = self._testPush(None)

        def verify(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remote)

        deferred = self.pushRepository(self.repository)
        return deferred.addCallback(verify)

    def testInvalidUser(self):
        """A push by an unknown user must be denied."""
        remote = self._testPush("random")

        def verify(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remote)

        deferred = self.pushRepository(self.repository)
        return deferred.addCallback(verify)

    def testValidUser(self):
        """A push by the known key user with the right key must succeed."""
        remote = self._testPush("key")

        def verify(result):
            self.assertNoError()
            self.assertEqual(self.repository, remote)

        deferred = self.pushRepository(self.repository, keyFile = "test")
        return deferred.addCallback(verify)
class PasswordAuthenticationTests(AuthenticationTest):
    """Exercise password authentication over both SSH and HTTP remotes."""

    def setUp(self):
        ApplicationTest.setUp(self)
        self.startApplication(pluginPackages={
            IAuth: passAuth
        })

    def _testSSH(self, user):
        # Prepare a local repo, a bare remote, an "origin" SSH remote URL
        # embedding *user*, and a commit to push.
        self.repository.initialize()
        remoteRepository = self.createTemporaryRepository(None, self.repository.path, True)
        self.repository.addRemote("origin", formatRemote("ssh", self.ssh, remoteRepository.path.split('/')[-1], user))
        self.generateComplicatedCommit()
        return remoteRepository

    def _testHTTP(self, user):
        # Same as _testSSH but the origin remote uses the HTTP transport.
        self.repository.initialize()
        remoteRepository = self.createTemporaryRepository(None, self.repository.path, True)
        self.repository.addRemote("origin", formatRemote("http", self.http, remoteRepository.path.split('/')[-1], user))
        self.generateComplicatedCommit()
        return remoteRepository

    def testSSHInvalidUser(self):
        # Unknown user over SSH: push denied, remote left untouched.
        remoteRepository = self._testSSH("random")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository).addCallback(processEnded)

    def testHTTPInvalidUser(self):
        # Unknown user over HTTP: push denied.
        remoteRepository = self._testHTTP("random")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository).addCallback(processEnded)

    def testSSHValidUserWrongPassword(self):
        # Known user, wrong password ("test" instead of "test_pass"): denied.
        remoteRepository = self._testSSH("pass")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test").addCallback(processEnded)

    def testHTTPValidUserWrongPassword(self):
        # Known user, wrong password over HTTP: denied.
        remoteRepository = self._testHTTP("pass")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test").addCallback(processEnded)

    def testSSHValidUser(self):
        # Known user with the correct password: push succeeds.
        remoteRepository = self._testSSH("pass")
        def processEnded(result):
            self.assertNoError()
            self.assertEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test_pass").addCallback(processEnded)

    def testHTTPValidUser(self):
        # Known user with the correct password over HTTP: push succeeds.
        remoteRepository = self._testHTTP("pass")
        def processEnded(result):
            self.assertNoError()
            self.assertEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test_pass").addCallback(processEnded) | data/train/python/39d087a3a98a3e9afcad282e2d7ff6e2837cce78test_authentication.py | from gitexd.interfaces import IAuth
from gitexd.tests import ApplicationTest, formatRemote, AuthenticationTest
from gitexd.tests.plugins import keyAuth, passAuth
__author__ = 'christophe'
class KeyAuthenticationTests(AuthenticationTest):
    """Exercise key-based authentication via the keyAuth plugin."""

    def setUp(self):
        ApplicationTest.setUp(self)
        self.startApplication(pluginPackages={
            IAuth: keyAuth
        })

    def testAnonymous(self):
        # No user at all: the push must be denied.
        remoteRepository = self._testPush(None)
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository).addCallback(processEnded)

    def testInvalidUser(self):
        # Unknown user: the push must be denied.
        remoteRepository = self._testPush("random")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository).addCallback(processEnded)

    def testValidUser(self):
        # Known "key" user with the matching key file: push succeeds.
        remoteRepository = self._testPush("key")
        def processEnded(result):
            self.assertNoError()
            self.assertEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, keyFile = "test").addCallback(processEnded)
class PasswordAuthenticationTests(AuthenticationTest):
    """Password authentication tests for both the SSH and HTTP transports."""

    def setUp(self):
        ApplicationTest.setUp(self)
        self.startApplication(pluginPackages={
            IAuth: passAuth
        })

    def _testSSH(self, user):
        # Build a repo + bare remote and point "origin" at an SSH URL for *user*.
        self.repository.initialize()
        remoteRepository = self.createTemporaryRepository(None, self.repository.path, True)
        self.repository.addRemote("origin", formatRemote("ssh", self.ssh, remoteRepository.path.split('/')[-1], user))
        self.generateComplicatedCommit()
        return remoteRepository

    def _testHTTP(self, user):
        # As _testSSH, but "origin" uses the HTTP transport.
        self.repository.initialize()
        remoteRepository = self.createTemporaryRepository(None, self.repository.path, True)
        self.repository.addRemote("origin", formatRemote("http", self.http, remoteRepository.path.split('/')[-1], user))
        self.generateComplicatedCommit()
        return remoteRepository

    def testSSHInvalidUser(self):
        # Unknown user over SSH: denied.
        remoteRepository = self._testSSH("random")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository).addCallback(processEnded)

    def testHTTPInvalidUser(self):
        # Unknown user over HTTP: denied.
        remoteRepository = self._testHTTP("random")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository).addCallback(processEnded)

    def testSSHValidUserWrongPassword(self):
        # Correct user, wrong password: denied.
        remoteRepository = self._testSSH("pass")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test").addCallback(processEnded)

    def testHTTPValidUserWrongPassword(self):
        # Correct user, wrong password over HTTP: denied.
        remoteRepository = self._testHTTP("pass")
        def processEnded(result):
            self.assertPermissionDenied()
            self.assertNotEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test").addCallback(processEnded)

    def testSSHValidUser(self):
        # Correct user and password: push succeeds.
        remoteRepository = self._testSSH("pass")
        def processEnded(result):
            self.assertNoError()
            self.assertEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test_pass").addCallback(processEnded)

    def testHTTPValidUser(self):
        # Correct user and password over HTTP: push succeeds.
        remoteRepository = self._testHTTP("pass")
        def processEnded(result):
            self.assertNoError()
            self.assertEqual(self.repository, remoteRepository)
        return self.pushRepository(self.repository, "test_pass").addCallback(processEnded) | 0.538983 | 0.220384 |
# ### Resize images for select_characters and speaker recipes - Prodigy
"""Usage:
resize_images.py <episode_name> <path_to_corpora>
"""
import os
import json
from custom_loaders import *
from PIL import Image
from pathlib import Path
from docopt import docopt
if __name__ == '__main__':
    args = docopt(__doc__)
    # path to Plumcot data
    DATA_PLUMCOT = args["<path_to_corpora>"]
    episode = args["<episode_name>"]
    # Single-episode list keeps the per-episode loop structure below.
    episodes_list = [episode]
    for episode in episodes_list:
        print("\nCurrent episode", episode)
        # Episode ids look like "<series>.<...>"; the prefix names the series.
        series = episode.split('.')[0]
        path = DATA_PLUMCOT
        # path to credits
        with open(os.path.join(path, f"{series}/credits.txt")) as f_c:
            credits = f_c.read()
        # path to characters
        with open(os.path.join(path,f"{series}/characters.txt")) as f_ch:
            characters = f_ch.read()
        # First comma-separated field of each non-empty line is the character name.
        characters_list = [char.split(',')[0] for char in characters.split('\n') if char != '']
        print(episode)
        # credits per episodes; the comprehension's `episode` variable is
        # scoped to the comprehension and does not clobber the loop variable.
        credits_dict = {episode.split(',')[0] : episode.split(',')[1:] for episode in credits.split('\n')}
        final_dict = {}
        for ep, credit in credits_dict.items():
            # Keep only characters whose credit flag is "1" for this episode.
            final_dict[ep] = [ch for ch, cr in zip(characters_list, credit) if cr == "1"]
        # credits for the current episode
        episode_characters = final_dict[episode]
        # open json file corresponding to the current show
        # NOTE(review): this file handle is never closed; consider a `with` block.
        data = [json.loads(line) for line in open(os.path.join(path,f"{series}/images/images.json"), 'r')]
        # Resize (in place, overwriting the file) every picture belonging to a
        # credited character of this episode.
        for character in episode_characters :
            for picture in data:
                # picture[0] is the character name, picture[1] the image path.
                if character == picture[0]:
                    img = Image.open(os.path.join(path,f"{series}/images/{picture[1]}"))
                    img_resize = img.resize((43, 44))
                    img_resize.save(os.path.join(path,f"{series}/images/{picture[1]}"))
    print("DONE") | annotation_scripts/resize_images.py |
# ### Resize images for select_characters and speaker recipes - Prodigy
"""Usage:
resize_images.py <episode_name> <path_to_corpora>
"""
import os
import json
from custom_loaders import *
from PIL import Image
from pathlib import Path
from docopt import docopt
if __name__ == '__main__':
    args = docopt(__doc__)
    # path to Plumcot data
    DATA_PLUMCOT = args["<path_to_corpora>"]
    episode = args["<episode_name>"]
    # Wrap the single episode so the body below can loop uniformly.
    episodes_list = [episode]
    for episode in episodes_list:
        print("\nCurrent episode", episode)
        # The series id is the part of the episode id before the first dot.
        series = episode.split('.')[0]
        path = DATA_PLUMCOT
        # path to credits
        with open(os.path.join(path, f"{series}/credits.txt")) as f_c:
            credits = f_c.read()
        # path to characters
        with open(os.path.join(path,f"{series}/characters.txt")) as f_ch:
            characters = f_ch.read()
        # Character name = first comma-separated field of each non-empty line.
        characters_list = [char.split(',')[0] for char in characters.split('\n') if char != '']
        print(episode)
        # credits per episodes (comprehension-local `episode` does not
        # affect the enclosing loop variable)
        credits_dict = {episode.split(',')[0] : episode.split(',')[1:] for episode in credits.split('\n')}
        final_dict = {}
        for ep, credit in credits_dict.items():
            # Characters flagged "1" are credited in episode `ep`.
            final_dict[ep] = [ch for ch, cr in zip(characters_list, credit) if cr == "1"]
        # credits for the current episode
        episode_characters = final_dict[episode]
        # open json file corresponding to the current show
        # NOTE(review): file handle left open; a `with` block would be safer.
        data = [json.loads(line) for line in open(os.path.join(path,f"{series}/images/images.json"), 'r')]
        # Overwrite each credited character's picture with a 43x44 version.
        for character in episode_characters :
            for picture in data:
                # picture = [character_name, relative_image_path]
                if character == picture[0]:
                    img = Image.open(os.path.join(path,f"{series}/images/{picture[1]}"))
                    img_resize = img.resize((43, 44))
                    img_resize.save(os.path.join(path,f"{series}/images/{picture[1]}"))
    print("DONE") | 0.378115 | 0.293038 |
import uuid
import nanomsg
import logging
from .error import DecodeError
from .error import RequestParseError
from .error import AuthenticateError
from .error import AuthenticatorInvalidSignature
from .encoder import MsgPackEncoder
from .core import Endpoint
from .core import Process
class Responder(Endpoint, Process):
    """ A service which responds to requests.

    Wraps a nanomsg REP socket: each received payload is decoded,
    parsed into (method, args, ref), dispatched to a registered
    function, and answered with a response dict.
    """
    # pylint: disable=too-many-arguments
    # pylint: disable=no-member

    def __init__(self, address, encoder=None, authenticator=None,
                 socket=None, bind=True, timeouts=(None, None)):
        # Defaults
        socket = socket or nanomsg.Socket(nanomsg.REP)
        encoder = encoder or MsgPackEncoder()
        super(Responder, self).__init__(
            socket, address, bind, encoder, authenticator, timeouts)
        # Registered callables and their descriptions, keyed by method name.
        self.methods = {}
        self.descriptions = {}

    def execute(self, method, args, ref):
        """ Execute the method with args.

        Returns a dict with 'result', 'error' and the caller's 'ref'
        echoed back; 'error' is a string when the method is unknown or
        raised, otherwise None.
        """
        response = {'result': None, 'error': None, 'ref': ref}
        fun = self.methods.get(method)
        if not fun:
            response['error'] = 'Method `{}` not found'.format(method)
        else:
            try:
                response['result'] = fun(*args)
            except Exception as exception:  # pylint: disable=broad-except
                # Registered functions may raise anything; report the failure
                # to the client instead of crashing the service loop.
                logging.error(exception, exc_info=True)
                response['error'] = str(exception)
        return response

    def register(self, name, fun, description=None):
        """ Register function on this service """
        self.methods[name] = fun
        self.descriptions[name] = description

    @classmethod
    def parse(cls, payload):
        """ Parse client request into (method, args, ref).

        Raises RequestParseError when the payload does not unpack into
        exactly three items.
        """
        try:
            method, args, ref = payload
        except Exception as exception:
            raise RequestParseError(exception)
        else:
            return method, args, ref

    def process(self):
        """ Receive data from socket and process request """
        response = None
        try:
            payload = self.receive()
            method, args, ref = self.parse(payload)
            response = self.execute(method, args, ref)
        # Both authentication failures were handled by two byte-identical
        # clauses before; fold them into one.
        except (AuthenticateError, AuthenticatorInvalidSignature) as exception:
            logging.error(
                'Service error while authenticating request: %s',
                exception, exc_info=True)
        except DecodeError as exception:
            logging.error(
                'Service error while decoding request: %s',
                exception, exc_info=True)
        except RequestParseError as exception:
            logging.error(
                'Service error while parsing request: %s',
                exception, exc_info=True)
        else:
            logging.debug('Service received payload: %s', payload)
        # Always answer: a REP socket must reply before it can receive again,
        # so failures are answered with an empty payload.
        if response:
            self.send(response)
        else:
            self.send('')
class Requester(Endpoint):
    """ A requester client """
    # pylint: disable=too-many-arguments
    # pylint: disable=no-member

    def __init__(self, address, encoder=None, authenticator=None,
                 socket=None, bind=False, timeouts=(None, None)):
        # Defaults
        socket = socket or nanomsg.Socket(nanomsg.REQ)
        encoder = encoder or MsgPackEncoder()
        super(Requester, self).__init__(
            socket, address, bind, encoder, authenticator, timeouts)

    @classmethod
    def build_payload(cls, method, args):
        """ Build the payload to be sent to a `Responder` """
        # Fresh UUID per call so the reply can be matched to this request.
        ref = str(uuid.uuid4())
        return (method, args, ref)

    # pylint: disable=logging-format-interpolation
    def call(self, method, *args):
        """ Make a call to a `Responder` and return (result, error) """
        payload = self.build_payload(method, args)
        logging.debug('* Client will send payload: {}'.format(payload))
        self.send(payload)
        res = self.receive()
        # NOTE(review): assert is stripped under `python -O`; raise a real
        # exception if a ref mismatch must stay fatal — TODO confirm.
        assert payload[2] == res['ref']
        return res['result'], res['error'] | nanoservice/reqrep.py | import uuid
import nanomsg
import logging
from .error import DecodeError
from .error import RequestParseError
from .error import AuthenticateError
from .error import AuthenticatorInvalidSignature
from .encoder import MsgPackEncoder
from .core import Endpoint
from .core import Process
class Responder(Endpoint, Process):
    """ A service which responds to requests.

    Receives payloads on a nanomsg REP socket, dispatches them to
    registered functions, and replies with a response dict.
    """
    # pylint: disable=too-many-arguments
    # pylint: disable=no-member

    def __init__(self, address, encoder=None, authenticator=None,
                 socket=None, bind=True, timeouts=(None, None)):
        # Defaults
        socket = socket or nanomsg.Socket(nanomsg.REP)
        encoder = encoder or MsgPackEncoder()
        super(Responder, self).__init__(
            socket, address, bind, encoder, authenticator, timeouts)
        # Method-name -> callable / description registries.
        self.methods = {}
        self.descriptions = {}

    def execute(self, method, args, ref):
        """ Execute the method with args; return a response dict carrying
        'result', 'error' and the echoed 'ref'. """
        response = {'result': None, 'error': None, 'ref': ref}
        fun = self.methods.get(method)
        if not fun:
            response['error'] = 'Method `{}` not found'.format(method)
        else:
            try:
                response['result'] = fun(*args)
            except Exception as exception:  # pylint: disable=broad-except
                # Report user-function failures to the client rather than die.
                logging.error(exception, exc_info=True)
                response['error'] = str(exception)
        return response

    def register(self, name, fun, description=None):
        """ Register function on this service """
        self.methods[name] = fun
        self.descriptions[name] = description

    @classmethod
    def parse(cls, payload):
        """ Parse client request; raises RequestParseError on bad shape. """
        try:
            method, args, ref = payload
        except Exception as exception:
            raise RequestParseError(exception)
        else:
            return method, args, ref

    def process(self):
        """ Receive data from socket and process request """
        response = None
        try:
            payload = self.receive()
            method, args, ref = self.parse(payload)
            response = self.execute(method, args, ref)
        # Collapse the two previously duplicated authentication clauses.
        except (AuthenticateError, AuthenticatorInvalidSignature) as exception:
            logging.error(
                'Service error while authenticating request: %s',
                exception, exc_info=True)
        except DecodeError as exception:
            logging.error(
                'Service error while decoding request: %s',
                exception, exc_info=True)
        except RequestParseError as exception:
            logging.error(
                'Service error while parsing request: %s',
                exception, exc_info=True)
        else:
            logging.debug('Service received payload: %s', payload)
        # REP sockets must answer every request; send '' on failure.
        if response:
            self.send(response)
        else:
            self.send('')
class Requester(Endpoint):
""" A requester client """
# pylint: disable=too-many-arguments
# pylint: disable=no-member
def __init__(self, address, encoder=None, authenticator=None,
socket=None, bind=False, timeouts=(None, None)):
# Defaults
socket = socket or nanomsg.Socket(nanomsg.REQ)
encoder = encoder or MsgPackEncoder()
super(Requester, self).__init__(
socket, address, bind, encoder, authenticator, timeouts)
@classmethod
def build_payload(cls, method, args):
""" Build the payload to be sent to a `Responder` """
ref = str(uuid.uuid4())
return (method, args, ref)
# pylint: disable=logging-format-interpolation
def call(self, method, *args):
""" Make a call to a `Responder` and return the result """
payload = self.build_payload(method, args)
logging.debug('* Client will send payload: {}'.format(payload))
self.send(payload)
res = self.receive()
assert payload[2] == res['ref']
return res['result'], res['error'] | 0.666497 | 0.069985 |
import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class RestartProcessInstanceDto(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'process_instance_ids': 'list[str]',
'historic_process_instance_query': 'HistoricProcessInstanceQueryDto',
'skip_custom_listeners': 'bool',
'skip_io_mappings': 'bool',
'initial_variables': 'bool',
'without_business_key': 'bool',
'instructions': 'list[RestartProcessInstanceModificationInstructionDto]'
}
attribute_map = {
'process_instance_ids': 'processInstanceIds',
'historic_process_instance_query': 'historicProcessInstanceQuery',
'skip_custom_listeners': 'skipCustomListeners',
'skip_io_mappings': 'skipIoMappings',
'initial_variables': 'initialVariables',
'without_business_key': 'withoutBusinessKey',
'instructions': 'instructions'
}
def __init__(self, process_instance_ids=None, historic_process_instance_query=None, skip_custom_listeners=None, skip_io_mappings=None, initial_variables=None, without_business_key=None, instructions=None, local_vars_configuration=None): # noqa: E501
"""RestartProcessInstanceDto - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._process_instance_ids = None
self._historic_process_instance_query = None
self._skip_custom_listeners = None
self._skip_io_mappings = None
self._initial_variables = None
self._without_business_key = None
self._instructions = None
self.discriminator = None
if process_instance_ids is not None:
self.process_instance_ids = process_instance_ids
if historic_process_instance_query is not None:
self.historic_process_instance_query = historic_process_instance_query
self.skip_custom_listeners = skip_custom_listeners
self.skip_io_mappings = skip_io_mappings
self.initial_variables = initial_variables
self.without_business_key = without_business_key
if instructions is not None:
self.instructions = instructions
@property
def process_instance_ids(self):
"""Gets the process_instance_ids of this RestartProcessInstanceDto. # noqa: E501
A list of process instance ids to restart. # noqa: E501
:return: The process_instance_ids of this RestartProcessInstanceDto. # noqa: E501
:rtype: list[str]
"""
return self._process_instance_ids
@process_instance_ids.setter
def process_instance_ids(self, process_instance_ids):
"""Sets the process_instance_ids of this RestartProcessInstanceDto.
A list of process instance ids to restart. # noqa: E501
:param process_instance_ids: The process_instance_ids of this RestartProcessInstanceDto. # noqa: E501
:type: list[str]
"""
self._process_instance_ids = process_instance_ids
@property
def historic_process_instance_query(self):
"""Gets the historic_process_instance_query of this RestartProcessInstanceDto. # noqa: E501
:return: The historic_process_instance_query of this RestartProcessInstanceDto. # noqa: E501
:rtype: HistoricProcessInstanceQueryDto
"""
return self._historic_process_instance_query
@historic_process_instance_query.setter
def historic_process_instance_query(self, historic_process_instance_query):
"""Sets the historic_process_instance_query of this RestartProcessInstanceDto.
:param historic_process_instance_query: The historic_process_instance_query of this RestartProcessInstanceDto. # noqa: E501
:type: HistoricProcessInstanceQueryDto
"""
self._historic_process_instance_query = historic_process_instance_query
@property
def skip_custom_listeners(self):
"""Gets the skip_custom_listeners of this RestartProcessInstanceDto. # noqa: E501
Skip execution listener invocation for activities that are started as part of this request. # noqa: E501
:return: The skip_custom_listeners of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._skip_custom_listeners
@skip_custom_listeners.setter
def skip_custom_listeners(self, skip_custom_listeners):
"""Sets the skip_custom_listeners of this RestartProcessInstanceDto.
Skip execution listener invocation for activities that are started as part of this request. # noqa: E501
:param skip_custom_listeners: The skip_custom_listeners of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._skip_custom_listeners = skip_custom_listeners
@property
def skip_io_mappings(self):
"""Gets the skip_io_mappings of this RestartProcessInstanceDto. # noqa: E501
Skip execution of [input/output variable mappings](https://docs.camunda.org/manual/7.13/user-guide/process-engine/variables/#input-output-variable-mapping) for activities that are started as part of this request. # noqa: E501
:return: The skip_io_mappings of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._skip_io_mappings
@skip_io_mappings.setter
def skip_io_mappings(self, skip_io_mappings):
"""Sets the skip_io_mappings of this RestartProcessInstanceDto.
Skip execution of [input/output variable mappings](https://docs.camunda.org/manual/7.13/user-guide/process-engine/variables/#input-output-variable-mapping) for activities that are started as part of this request. # noqa: E501
:param skip_io_mappings: The skip_io_mappings of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._skip_io_mappings = skip_io_mappings
@property
def initial_variables(self):
"""Gets the initial_variables of this RestartProcessInstanceDto. # noqa: E501
Set the initial set of variables during restart. By default, the last set of variables is used. # noqa: E501
:return: The initial_variables of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._initial_variables
@initial_variables.setter
def initial_variables(self, initial_variables):
"""Sets the initial_variables of this RestartProcessInstanceDto.
Set the initial set of variables during restart. By default, the last set of variables is used. # noqa: E501
:param initial_variables: The initial_variables of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._initial_variables = initial_variables
@property
def without_business_key(self):
"""Gets the without_business_key of this RestartProcessInstanceDto. # noqa: E501
Do not take over the business key of the historic process instance. # noqa: E501
:return: The without_business_key of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._without_business_key
@without_business_key.setter
def without_business_key(self, without_business_key):
"""Sets the without_business_key of this RestartProcessInstanceDto.
Do not take over the business key of the historic process instance. # noqa: E501
:param without_business_key: The without_business_key of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._without_business_key = without_business_key
@property
def instructions(self):
"""Gets the instructions of this RestartProcessInstanceDto. # noqa: E501
**Optional**. A JSON array of instructions that specify which activities to start the process instance at. If this property is omitted, the process instance starts at its default blank start event. # noqa: E501
:return: The instructions of this RestartProcessInstanceDto. # noqa: E501
:rtype: list[RestartProcessInstanceModificationInstructionDto]
"""
return self._instructions
@instructions.setter
def instructions(self, instructions):
"""Sets the instructions of this RestartProcessInstanceDto.
**Optional**. A JSON array of instructions that specify which activities to start the process instance at. If this property is omitted, the process instance starts at its default blank start event. # noqa: E501
:param instructions: The instructions of this RestartProcessInstanceDto. # noqa: E501
:type: list[RestartProcessInstanceModificationInstructionDto]
"""
self._instructions = instructions
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RestartProcessInstanceDto):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, RestartProcessInstanceDto):
return True
return self.to_dict() != other.to_dict() | openapi-python-client/openapi_client/models/restart_process_instance_dto.py | import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class RestartProcessInstanceDto(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'process_instance_ids': 'list[str]',
'historic_process_instance_query': 'HistoricProcessInstanceQueryDto',
'skip_custom_listeners': 'bool',
'skip_io_mappings': 'bool',
'initial_variables': 'bool',
'without_business_key': 'bool',
'instructions': 'list[RestartProcessInstanceModificationInstructionDto]'
}
attribute_map = {
'process_instance_ids': 'processInstanceIds',
'historic_process_instance_query': 'historicProcessInstanceQuery',
'skip_custom_listeners': 'skipCustomListeners',
'skip_io_mappings': 'skipIoMappings',
'initial_variables': 'initialVariables',
'without_business_key': 'withoutBusinessKey',
'instructions': 'instructions'
}
def __init__(self, process_instance_ids=None, historic_process_instance_query=None, skip_custom_listeners=None, skip_io_mappings=None, initial_variables=None, without_business_key=None, instructions=None, local_vars_configuration=None): # noqa: E501
"""RestartProcessInstanceDto - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._process_instance_ids = None
self._historic_process_instance_query = None
self._skip_custom_listeners = None
self._skip_io_mappings = None
self._initial_variables = None
self._without_business_key = None
self._instructions = None
self.discriminator = None
if process_instance_ids is not None:
self.process_instance_ids = process_instance_ids
if historic_process_instance_query is not None:
self.historic_process_instance_query = historic_process_instance_query
self.skip_custom_listeners = skip_custom_listeners
self.skip_io_mappings = skip_io_mappings
self.initial_variables = initial_variables
self.without_business_key = without_business_key
if instructions is not None:
self.instructions = instructions
@property
def process_instance_ids(self):
"""Gets the process_instance_ids of this RestartProcessInstanceDto. # noqa: E501
A list of process instance ids to restart. # noqa: E501
:return: The process_instance_ids of this RestartProcessInstanceDto. # noqa: E501
:rtype: list[str]
"""
return self._process_instance_ids
@process_instance_ids.setter
def process_instance_ids(self, process_instance_ids):
"""Sets the process_instance_ids of this RestartProcessInstanceDto.
A list of process instance ids to restart. # noqa: E501
:param process_instance_ids: The process_instance_ids of this RestartProcessInstanceDto. # noqa: E501
:type: list[str]
"""
self._process_instance_ids = process_instance_ids
@property
def historic_process_instance_query(self):
"""Gets the historic_process_instance_query of this RestartProcessInstanceDto. # noqa: E501
:return: The historic_process_instance_query of this RestartProcessInstanceDto. # noqa: E501
:rtype: HistoricProcessInstanceQueryDto
"""
return self._historic_process_instance_query
@historic_process_instance_query.setter
def historic_process_instance_query(self, historic_process_instance_query):
"""Sets the historic_process_instance_query of this RestartProcessInstanceDto.
:param historic_process_instance_query: The historic_process_instance_query of this RestartProcessInstanceDto. # noqa: E501
:type: HistoricProcessInstanceQueryDto
"""
self._historic_process_instance_query = historic_process_instance_query
@property
def skip_custom_listeners(self):
"""Gets the skip_custom_listeners of this RestartProcessInstanceDto. # noqa: E501
Skip execution listener invocation for activities that are started as part of this request. # noqa: E501
:return: The skip_custom_listeners of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._skip_custom_listeners
@skip_custom_listeners.setter
def skip_custom_listeners(self, skip_custom_listeners):
"""Sets the skip_custom_listeners of this RestartProcessInstanceDto.
Skip execution listener invocation for activities that are started as part of this request. # noqa: E501
:param skip_custom_listeners: The skip_custom_listeners of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._skip_custom_listeners = skip_custom_listeners
@property
def skip_io_mappings(self):
"""Gets the skip_io_mappings of this RestartProcessInstanceDto. # noqa: E501
Skip execution of [input/output variable mappings](https://docs.camunda.org/manual/7.13/user-guide/process-engine/variables/#input-output-variable-mapping) for activities that are started as part of this request. # noqa: E501
:return: The skip_io_mappings of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._skip_io_mappings
@skip_io_mappings.setter
def skip_io_mappings(self, skip_io_mappings):
"""Sets the skip_io_mappings of this RestartProcessInstanceDto.
Skip execution of [input/output variable mappings](https://docs.camunda.org/manual/7.13/user-guide/process-engine/variables/#input-output-variable-mapping) for activities that are started as part of this request. # noqa: E501
:param skip_io_mappings: The skip_io_mappings of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._skip_io_mappings = skip_io_mappings
@property
def initial_variables(self):
"""Gets the initial_variables of this RestartProcessInstanceDto. # noqa: E501
Set the initial set of variables during restart. By default, the last set of variables is used. # noqa: E501
:return: The initial_variables of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._initial_variables
@initial_variables.setter
def initial_variables(self, initial_variables):
"""Sets the initial_variables of this RestartProcessInstanceDto.
Set the initial set of variables during restart. By default, the last set of variables is used. # noqa: E501
:param initial_variables: The initial_variables of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._initial_variables = initial_variables
@property
def without_business_key(self):
"""Gets the without_business_key of this RestartProcessInstanceDto. # noqa: E501
Do not take over the business key of the historic process instance. # noqa: E501
:return: The without_business_key of this RestartProcessInstanceDto. # noqa: E501
:rtype: bool
"""
return self._without_business_key
@without_business_key.setter
def without_business_key(self, without_business_key):
"""Sets the without_business_key of this RestartProcessInstanceDto.
Do not take over the business key of the historic process instance. # noqa: E501
:param without_business_key: The without_business_key of this RestartProcessInstanceDto. # noqa: E501
:type: bool
"""
self._without_business_key = without_business_key
@property
def instructions(self):
"""Gets the instructions of this RestartProcessInstanceDto. # noqa: E501
**Optional**. A JSON array of instructions that specify which activities to start the process instance at. If this property is omitted, the process instance starts at its default blank start event. # noqa: E501
:return: The instructions of this RestartProcessInstanceDto. # noqa: E501
:rtype: list[RestartProcessInstanceModificationInstructionDto]
"""
return self._instructions
@instructions.setter
def instructions(self, instructions):
"""Sets the instructions of this RestartProcessInstanceDto.
**Optional**. A JSON array of instructions that specify which activities to start the process instance at. If this property is omitted, the process instance starts at its default blank start event. # noqa: E501
:param instructions: The instructions of this RestartProcessInstanceDto. # noqa: E501
:type: list[RestartProcessInstanceModificationInstructionDto]
"""
self._instructions = instructions
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RestartProcessInstanceDto):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, RestartProcessInstanceDto):
return True
return self.to_dict() != other.to_dict() | 0.649023 | 0.127653 |
# Commented out IPython magic to ensure Python compatibility.
!git clone https://github.com/iro-cp/FCRN-DepthPrediction
# %cd FCRN-DepthPrediction/tensorflow
!ls -l
!wget http://campar.in.tum.de/files/rupprecht/depthpred/NYU_ResNet-UpProj.npy
!wget http://campar.in.tum.de/files/rupprecht/depthpred/NYU_FCRN-checkpoint.zip
!unzip NYU_FCRN-checkpoint.zip
!ls -l
from google.colab import files
uploaded = files.upload()
import argparse
import os
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt
from PIL import Image
from tensorflow.python.framework import graph_util
from tensorflow.python.tools import freeze_graph
from tensorflow.python.tools import optimize_for_inference_lib
from tensorflow.tools.graph_transforms import TransformGraph
import models
def predict(model_data_path, image_path):
tf.reset_default_graph()
# Default input size
height = 228
width = 304
channels = 3
batch_size = 1
# Read image
img = Image.open(image_path)
img = img.resize([width,height], Image.ANTIALIAS)
img = np.array(img).astype('float32')
img = np.expand_dims(np.asarray(img), axis = 0)
# Create a placeholder for the input image
input_node = tf.placeholder(tf.float32, shape=(None, height, width, channels))
# Construct the network
net = models.ResNet50UpProj({'data': input_node}, batch_size, 1, False)
with tf.Session() as sess:
# Load the converted parameters
print('Loading the model')
# Use to load from ckpt file
saver = tf.train.Saver()
saver.restore(sess, model_data_path)
# Use to load from npy file
#net.load(model_data_path, sess)
# Evalute the network for the given image
pred = sess.run(net.get_output(), feed_dict={input_node: img})
# Plot result
fig = plt.figure()
ii = plt.imshow(pred[0,:,:,0], interpolation='nearest')
fig.colorbar(ii)
plt.show()
in_graph = sess.graph.as_graph_def()
tf.train.write_graph(in_graph, '.', 'fcrn.pb',as_text=False)
oname = net.get_output().name
print("oname",oname)
output_nodes_names=["ConvPred/ConvPred"]
output_graph_def = graph_util.convert_variables_to_constants(
sess, # The session
sess.graph.as_graph_def(), # input_graph_def is useful for retrieving the nodes
output_nodes_names
)
# output_graph_def.save("export/frozen2.pb")
output_graph_name="frozen.pb"
with tf.gfile.GFile(output_graph_name, "wb") as f:
f.write(output_graph_def.SerializeToString())
inp_node = ['Placeholder']
optimize_graph_def = optimize_for_inference_lib.optimize_for_inference(output_graph_def, inp_node, output_nodes_names,
tf.float32.as_datatype_enum)
#optimize_graph_def = TransformGraph(optimize_graph_def, inp_node, output_nodes_names, ["sort_by_execution_order"])
output_graph_name="optimize.pb"
with tf.gfile.GFile(output_graph_name, "wb") as f:
f.write(optimize_graph_def.SerializeToString())
return pred
# Predict the image
pred = predict("NYU_FCRN.ckpt", "00A9E4A0CC211F300F54C62EC35348B5BD80B34E.png")
!ls -l
import cv2
net = cv2.dnn.readNet("optimize.pb") | colab/fcrn_depth.py | # Commented out IPython magic to ensure Python compatibility.
!git clone https://github.com/iro-cp/FCRN-DepthPrediction
# %cd FCRN-DepthPrediction/tensorflow
!ls -l
!wget http://campar.in.tum.de/files/rupprecht/depthpred/NYU_ResNet-UpProj.npy
!wget http://campar.in.tum.de/files/rupprecht/depthpred/NYU_FCRN-checkpoint.zip
!unzip NYU_FCRN-checkpoint.zip
!ls -l
from google.colab import files
uploaded = files.upload()
import argparse
import os
import numpy as np
import tensorflow as tf
from matplotlib import pyplot as plt
from PIL import Image
from tensorflow.python.framework import graph_util
from tensorflow.python.tools import freeze_graph
from tensorflow.python.tools import optimize_for_inference_lib
from tensorflow.tools.graph_transforms import TransformGraph
import models
def predict(model_data_path, image_path):
tf.reset_default_graph()
# Default input size
height = 228
width = 304
channels = 3
batch_size = 1
# Read image
img = Image.open(image_path)
img = img.resize([width,height], Image.ANTIALIAS)
img = np.array(img).astype('float32')
img = np.expand_dims(np.asarray(img), axis = 0)
# Create a placeholder for the input image
input_node = tf.placeholder(tf.float32, shape=(None, height, width, channels))
# Construct the network
net = models.ResNet50UpProj({'data': input_node}, batch_size, 1, False)
with tf.Session() as sess:
# Load the converted parameters
print('Loading the model')
# Use to load from ckpt file
saver = tf.train.Saver()
saver.restore(sess, model_data_path)
# Use to load from npy file
#net.load(model_data_path, sess)
# Evalute the network for the given image
pred = sess.run(net.get_output(), feed_dict={input_node: img})
# Plot result
fig = plt.figure()
ii = plt.imshow(pred[0,:,:,0], interpolation='nearest')
fig.colorbar(ii)
plt.show()
in_graph = sess.graph.as_graph_def()
tf.train.write_graph(in_graph, '.', 'fcrn.pb',as_text=False)
oname = net.get_output().name
print("oname",oname)
output_nodes_names=["ConvPred/ConvPred"]
output_graph_def = graph_util.convert_variables_to_constants(
sess, # The session
sess.graph.as_graph_def(), # input_graph_def is useful for retrieving the nodes
output_nodes_names
)
# output_graph_def.save("export/frozen2.pb")
output_graph_name="frozen.pb"
with tf.gfile.GFile(output_graph_name, "wb") as f:
f.write(output_graph_def.SerializeToString())
inp_node = ['Placeholder']
optimize_graph_def = optimize_for_inference_lib.optimize_for_inference(output_graph_def, inp_node, output_nodes_names,
tf.float32.as_datatype_enum)
#optimize_graph_def = TransformGraph(optimize_graph_def, inp_node, output_nodes_names, ["sort_by_execution_order"])
output_graph_name="optimize.pb"
with tf.gfile.GFile(output_graph_name, "wb") as f:
f.write(optimize_graph_def.SerializeToString())
return pred
# Predict the image
pred = predict("NYU_FCRN.ckpt", "00A9E4A0CC211F300F54C62EC35348B5BD80B34E.png")
!ls -l
import cv2
net = cv2.dnn.readNet("optimize.pb") | 0.730866 | 0.308919 |
from collections import OrderedDict
POTENTIALLY_REQUIRED_DUNGEONS = ['Skyview','Earth Temple','Lanayru Mining Facility','Ancient Cistern','Sandship','Fire Sanctuary']
DUNGEON_NAMES = OrderedDict([
("SV", "Skyview"),
("ET", "Earth Temple"),
("LMF", "Lanayru Mining Facility"),
("AC", "Ancient Cistern"),
("SS", "Sandship"),
("FS", "Fire Sanctuary"),
("SK", "Sky Keep"),
('LanayruCaves', 'Lanayru Caves'), # "short name" doesn't allow space
])
DUNGEON_NAME_TO_SHORT_DUNGEON_NAME = OrderedDict([v, k] for k, v in DUNGEON_NAMES.items())
SHOP_CHECKS = [
"Beedle - 50 Rupee Item",
"Beedle - First 100 Rupee Item",
"Beedle - Second 100 Rupee Item",
"Beedle - Third 100 Rupee Item",
"Beedle - 300 Rupee Item",
"Beedle - 600 Rupee Item",
"Beedle - 800 Rupee Item",
"Beedle - 1000 Rupee Item",
"Beedle - 1200 Rupee Item",
"Beedle - 1600 Rupee Item",
]
MAP_CHECKS = [
'Skyview - Map Chest',
'Earth Temple - Map Chest',
'Lanayru Mining Facility - Map Chest',
'Ancient Cistern - Map Chest',
'Sandship - Map Chest',
'Fire Sanctuary - Map Chest',
'Sky Keep - Map Chest',
]
SMALL_KEY_CHECKS = [
'Skyview - Behind Two Eyes',
'Skyview - Behind Three Eyes',
'Lanayru Mining Facility - First Chest in Hub Room',
'Ancient Cistern - Small Key Chest',
'Ancient Cistern - Bokoblin',
'Sandship - Behind Combination Lock',
'Sandship - Robot in Brig',
'Fire Sanctuary - First Room',
'Fire Sanctuary - Second Small Key Chest',
'Fire Sanctuary - Third Small Key Chest',
'Lanayru Caves - Golo',
'Sky Keep - Small Key Chest'
]
BOSS_KEY_CHECKS = [
'Skyview - Boss Key',
'Earth Temple - Boss Key',
'Lanayru Mining Facility - Boss Key',
'Ancient Cistern - Boss Key',
'Sandship - Boss Key',
'Fire Sanctuary - Boss Key',
]
ALL_TYPES = ['skyloft', 'sky', 'thunderhead', 'faron', 'eldin', 'lanayru', 'dungeon', 'mini dungeon', 'free gift',
'freestanding', 'miscellaneous', 'silent realm', 'digging', 'bombable', 'combat', 'song', 'spiral charge',
'minigame', 'crystal', 'short', 'long', 'fetch', 'crystal quest', 'scrapper', 'peatrice', 'beedle',
'cheap', 'medium', 'expensive',
'goddess', 'faron goddess', 'eldin goddess', 'lanayru goddess', 'floria goddess', 'summit goddess',
'sand sea goddess'] | logic/constants.py | from collections import OrderedDict
POTENTIALLY_REQUIRED_DUNGEONS = ['Skyview','Earth Temple','Lanayru Mining Facility','Ancient Cistern','Sandship','Fire Sanctuary']
DUNGEON_NAMES = OrderedDict([
("SV", "Skyview"),
("ET", "Earth Temple"),
("LMF", "Lanayru Mining Facility"),
("AC", "Ancient Cistern"),
("SS", "Sandship"),
("FS", "Fire Sanctuary"),
("SK", "Sky Keep"),
('LanayruCaves', 'Lanayru Caves'), # "short name" doesn't allow space
])
DUNGEON_NAME_TO_SHORT_DUNGEON_NAME = OrderedDict([v, k] for k, v in DUNGEON_NAMES.items())
SHOP_CHECKS = [
"Beedle - 50 Rupee Item",
"Beedle - First 100 Rupee Item",
"Beedle - Second 100 Rupee Item",
"Beedle - Third 100 Rupee Item",
"Beedle - 300 Rupee Item",
"Beedle - 600 Rupee Item",
"Beedle - 800 Rupee Item",
"Beedle - 1000 Rupee Item",
"Beedle - 1200 Rupee Item",
"Beedle - 1600 Rupee Item",
]
MAP_CHECKS = [
'Skyview - Map Chest',
'Earth Temple - Map Chest',
'Lanayru Mining Facility - Map Chest',
'Ancient Cistern - Map Chest',
'Sandship - Map Chest',
'Fire Sanctuary - Map Chest',
'Sky Keep - Map Chest',
]
SMALL_KEY_CHECKS = [
'Skyview - Behind Two Eyes',
'Skyview - Behind Three Eyes',
'Lanayru Mining Facility - First Chest in Hub Room',
'Ancient Cistern - Small Key Chest',
'Ancient Cistern - Bokoblin',
'Sandship - Behind Combination Lock',
'Sandship - Robot in Brig',
'Fire Sanctuary - First Room',
'Fire Sanctuary - Second Small Key Chest',
'Fire Sanctuary - Third Small Key Chest',
'Lanayru Caves - Golo',
'Sky Keep - Small Key Chest'
]
BOSS_KEY_CHECKS = [
'Skyview - Boss Key',
'Earth Temple - Boss Key',
'Lanayru Mining Facility - Boss Key',
'Ancient Cistern - Boss Key',
'Sandship - Boss Key',
'Fire Sanctuary - Boss Key',
]
ALL_TYPES = ['skyloft', 'sky', 'thunderhead', 'faron', 'eldin', 'lanayru', 'dungeon', 'mini dungeon', 'free gift',
'freestanding', 'miscellaneous', 'silent realm', 'digging', 'bombable', 'combat', 'song', 'spiral charge',
'minigame', 'crystal', 'short', 'long', 'fetch', 'crystal quest', 'scrapper', 'peatrice', 'beedle',
'cheap', 'medium', 'expensive',
'goddess', 'faron goddess', 'eldin goddess', 'lanayru goddess', 'floria goddess', 'summit goddess',
'sand sea goddess'] | 0.491212 | 0.422683 |
import gzip
import shutil
import subprocess
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Iterator, List, Union, Tuple
import numpy as np
from Bio import SeqIO
from dataclasses_json import config, dataclass_json
@dataclass_json
@dataclass(frozen=True)
class PrimerMatch:
    """Immutable record of a primer alignment hit within a read."""
    # matched subsequence as found in the read
    match: str = field(metadata=config(field_name="match"))
    # start/end positions of the match within the read
    # (0-based half-open? TODO confirm against the code that fills these in)
    start: int = field(metadata=config(field_name="start"))
    end: int = field(metadata=config(field_name="end"))
    # distance between primer and matched subsequence
    # (presumably edit distance — verify against the matcher)
    dist: int = field(metadata=config(field_name="dist"))
@dataclass_json
@dataclass(frozen=True)
class RepData:
    """One tandem-repeat (SSR) region found within a sequence."""
    # start/end of the repeat region within the sequence
    # (0-based half-open? TODO confirm at the call site)
    start: int = field(metadata=config(field_name="start"))
    end: int = field(metadata=config(field_name="end"))
    # number of tandem repetitions of the repeat unit
    n_reps: int = field(metadata=config(field_name="n_reps"))
    # the repeated unit as it appears in the sequence
    rep_seq: str = field(metadata=config(field_name="rep_seq"))
    # canonical motif and its classification label
    motif: str = field(metadata=config(field_name="motif"))
    motif_class: str = field(metadata=config(field_name="motif_class"))
@dataclass_json
@dataclass
class SeqData:
    """Per-sequence record: identity, read counts and SSR annotations."""
    id: str = field(metadata=config(field_name="id"))
    seq: str = field(metadata=config(field_name="seq"))
    counts: int = field(metadata=config(field_name="counts"))
    # Bug fix: the original used `field(default=int)`, which makes the
    # default the `int` *type object*, not an integer. Use 0 instead.
    # The value is recomputed in __post_init__ anyway, so this is safe.
    length: int = field(default=0)
    # SSR repeat regions detected in `seq`
    rep_data: List[RepData] = field(
        default_factory=list, metadata=config(field_name="rep_data")
    )
    # samples in which this sequence was observed
    samples: List[str] = field(
        default_factory=list, metadata=config(field_name="samples")
    )
    non_ssr_seq: str = field(default="", metadata=config(field_name="non_ssr_seq"))
    # stutter-related sequence lists (short/long variants? TODO confirm)
    stutter_s: List[str] = field(
        default_factory=list, metadata=config(field_name="stutter_s")
    )
    stutter_l: List[str] = field(
        default_factory=list, metadata=config(field_name="stutter_l")
    )
    def __post_init__(self):
        # Normalize case and derive the ungapped length of the sequence.
        self.seq = self.seq.upper()
        self.length = len(self.seq.replace("-", ""))
@dataclass_json
@dataclass
class GenData:
    """Genotype call for one marker: allele calls and per-allele read support."""
    id: str = field(metadata=config(field_name="id"))
    # total number of reads observed for this marker
    n_reads_all: int = field(metadata=config(field_name="n_reads_all"))
    # called alleles; "NA" entries denote missing calls
    genotype: List[str] = field(
        default_factory=list, metadata=config(field_name="genotype")
    )
    # allele -> supporting read count
    n_reads_each: Dict[str, int] = field(
        default_factory=dict, metadata=config(field_name="n_reads_each")
    )
    def __post_init__(self):
        # Sanity check: every called allele (ignoring "NA") should have a
        # read count, and vice versa. Deliberately non-fatal — report the
        # inconsistency rather than refuse to build the record.
        # Bug fix: the original built `err` but never printed it, and the
        # assert-based check vanished entirely under `python -O`.
        err = "Allele sets differed between genotype and n_reads_each"
        alleles = [x for x in self.genotype if x != "NA"]
        if set(alleles) != set(self.n_reads_each.keys()):
            print(err, set(alleles), set(self.n_reads_each))
class MarkerData(object):
    """Per-marker primer sequences plus optional marker metadata.

    Primer information comes either from a single marker table
    (``path_marker_data``, expected to have ``Name``/``Fwd``/``Rev``
    columns) or from a pair of sequence files holding the forward and
    reverse primers.
    """

    def __init__(
        self,
        path_marker_data=None,
        path_fwd_primers=None,
        path_rev_primers=None,
        verbose=False,
        **kwargs,
    ):
        # Bug fix: the original passed `path_fwd_primers` twice, so the
        # reverse-primer file given by the caller was silently ignored.
        self.dict_primer = self.read_marker_file(
            path_marker_data, path_fwd_primers, path_rev_primers, verbose
        )
        self.path_marker_data = path_marker_data
        if self.path_marker_data:
            self.set_frag_len()
            self.set_max_alleles()

    def set_frag_len(self):
        """Load Name -> Frag_len from the marker table; None if absent."""
        try:
            self.dict_frag_len = gen_dict_from_table(
                self.path_marker_data, "Name", "Frag_len"
            )
        except ValueError:
            # Column missing from the table — treated as optional.
            self.dict_frag_len = None

    def set_max_alleles(self):
        """Load Name -> Max_alleles from the marker table; None if absent."""
        try:
            self.dict_max_alleles = gen_dict_from_table(
                self.path_marker_data, "Name", "Max_alleles"
            )
        except ValueError:
            # Column missing from the table — treated as optional.
            self.dict_max_alleles = None

    def read_marker_file(
        self,
        path_marker_data=None,
        path_fwd_primers=None,
        path_rev_primers=None,
        verbose=False,
    ):
        """Build ``{marker_name: [fwd_seq, rev_seq]}`` from the inputs.

        :raises RuntimeError: if the fwd/rev primer files differ in record count.
        :raises ValueError: if primer information is missing or incomplete.
        """
        if path_marker_data:
            dict_primer = gen_dict_from_table(
                path_marker_data, key="Name", value=["Fwd", "Rev"]
            )
        elif path_fwd_primers and path_rev_primers:
            len_f = count_records(path_fwd_primers)
            len_r = count_records(path_rev_primers)
            if len_f != len_r:
                msg = "The number of sequences differs between "
                msg += "{} and {}".format(path_fwd_primers, path_rev_primers)
                raise RuntimeError(msg)
            dict_primer = {}
            # `msg` is printed at most once, before the first mismatch.
            msg = "\nThe names of foward and rev primers do not match. "
            msg += "The foward primer names are used as marker names:"
            for f, r in zip(read_fastx(path_fwd_primers), read_fastx(path_rev_primers)):
                if f.id != r.id and verbose:
                    if msg:
                        print(msg)
                    msg = ""
                    print("Fwd: {0}, Rev: {1} -----> {0}".format(f.id, r.id))
                dict_primer[f.id] = [str(f.seq), str(r.seq)]
        elif path_fwd_primers:
            # Bug fix: when only the forward file is supplied it is the
            # *reverse* file that is missing (messages were swapped).
            msg = "'path_rev_primers' not provided"
            raise ValueError(msg)
        elif path_rev_primers:
            msg = "'path_fwd_primers' not provided"
            raise ValueError(msg)
        else:
            msg = "No information for primer sequences provided"
            raise ValueError(msg)
        return dict_primer
def revc(s: str) -> str:
    """Return the reverse complement of the given nucleotide sequence.

    Accepts IUPAC ambiguity codes, 'Z', gaps ('-') and both cases.

    Raises
    ------
    RuntimeError
        if *s* contains any character outside the supported alphabet
    """
    o = "ACGTUWSMKRYBDHVNZacgtuwsmkrybdhvnz-"
    c = "TGCAAWSKMYRVHDBNZtgcaawskmyrvhdbnz-"
    # BUG FIX: the old guard was `len(set(s) & set(o)) > len(set(s))`, which
    # is impossible (an intersection is never larger than either operand),
    # so invalid characters were silently passed through untranslated.
    if set(s) - set(o):
        errmsg = "invalid character was found in the sequeces"
        raise RuntimeError(errmsg)
    return s.translate(str.maketrans(o, c))[::-1]
def check_file(filepath: Union[str, Path]):
    """Raise unless *filepath* names an existing regular file."""
    path = Path(filepath) if isinstance(filepath, str) else filepath
    if not path.exists():
        raise FileNotFoundError("File not found: {}".format(path))
    if not path.is_file():
        raise RuntimeError("'{}' is not a file".format(path))
def check_no_wrapped(filepath: Union[str, Path], fmt: str = "fastq"):
    """
    Check the input sequence file and raise an error if it is wrapped
    (i.e. a record's sequence spans multiple lines).

    Parameters
    ----------
    filepath :
        path to the sequence file
    fmt :
        file format ('fasta' or 'fastq')

    Raises
    ------
    AssertionError
        if the header lines are not evenly spaced (wrapped records)
    ValueError
        if *fmt* is neither 'fasta' nor 'fastq'
    """
    if isinstance(filepath, str):
        filepath = Path(filepath)
    check_file(filepath)
    # Prefer ripgrep when available; prog0 reads the (possibly gzipped)
    # file, prog1 filters the already-decompressed pipe.
    if filepath.suffix == ".gz":
        if shutil.which("rg"):
            prog0 = "rg -z"
            prog1 = "rg"
        else:
            prog0 = "zgrep"
            prog1 = "grep"
    else:
        if shutil.which("rg"):
            prog0 = prog1 = "rg"
        else:
            prog0 = prog1 = "grep"
    if fmt == "fasta":
        # Line numbers of the first 20 '>' headers; in an unwrapped FASTA
        # they sit at lines 1, 3, 5, ... (offset j=1, step k=2).
        cmd0 = "{} -n -m 20 ^> {}".format(prog0, filepath).split()
        cmd1 = r"cut -d: -f1".split()
        res = subprocess.Popen(cmd0, stdout=subprocess.PIPE)
        res = subprocess.Popen(cmd1, stdin=res.stdout, stdout=subprocess.PIPE)
        j, k = 1, 2
    elif fmt == "fastq":
        # Keep each '@' header plus two following lines, then locate the
        # '+' separators; including the '--' group separators emitted by
        # grep/rg they land at 3, 7, 11, ... (j=3, k=4) when unwrapped.
        cmd0 = r"{} -A2 ^@ {}".format(prog0, filepath).split()
        cmd1 = r"{} -n -m 20 ^\+".format(prog1).split()
        cmd2 = r"cut -d: -f1".split()
        res = subprocess.Popen(cmd0, stdout=subprocess.PIPE)
        res = subprocess.Popen(cmd1, stdin=res.stdout, stdout=subprocess.PIPE)
        res = subprocess.Popen(cmd2, stdin=res.stdout, stdout=subprocess.PIPE)
        j, k = 3, 4
    else:
        # BUG FIX: an unknown fmt used to fall through and crash with
        # UnboundLocalError on `res`; fail with a clear message instead.
        raise ValueError("fmt must be either 'fastq' or 'fasta'")
    line_nos = res.stdout.read().decode("utf-8").strip().split()
    if line_nos:
        line_nos = np.array(line_nos).astype(int)
        n = len(line_nos)
        # BUG FIX: this used np.any, which passes as soon as a single
        # header is in the expected place; every header must be in place
        # for the file to count as unwrapped.
        assert np.all((line_nos - j) / k == np.arange(n))
def count_records(
    filepath: Union[str, Path], fmt: str = "fastq", opts: str = ""
) -> int:
    """
    Count the number of sequence records in a fasta/fastq file.

    Parameters
    ----------
    filepath:
        path to the input fasta/fastq file
    fmt:
        file format, 'fasta' or 'fastq' (default: "fastq")
    opts:
        extra options passed to grep/rg
    """
    path = Path(filepath) if isinstance(filepath, str) else filepath
    check_file(path)
    # Pick the search tools: prog0 reads the (possibly gzipped) file,
    # prog1 filters the decompressed stream.
    have_rg = bool(shutil.which("rg"))
    if path.suffix == ".gz":
        prog0 = "rg -z" if have_rg else "zgrep"
        prog1 = "rg" if have_rg else "grep"
    else:
        prog0 = prog1 = "rg" if have_rg else "grep"
    if fmt == "fasta":
        # One '>' header per record.
        find_headers = "{} -c ^> {} {}".format(prog0, path, opts).split()
        proc = subprocess.Popen(find_headers, stdout=subprocess.PIPE)
        return int(proc.stdout.read().decode("utf-8").strip())
    elif fmt == "fastq":
        # '@' alone is ambiguous (quality strings may start with '@'), so
        # keep each header plus the next two lines and count the '+'
        # separator lines instead.
        keep_heads = r"{} -A2 ^@ {} {}".format(prog0, path, opts).split()
        count_plus = r"{} -c ^\+".format(prog1).split()
        p0 = subprocess.Popen(keep_heads, stdout=subprocess.PIPE)
        p1 = subprocess.Popen(count_plus, stdin=p0.stdout, stdout=subprocess.PIPE)
        out = p1.stdout.read().decode("utf-8").strip()
        return int(out) if out else 0
    else:
        raise ValueError("fmt must be either 'fastq' or 'fasta'")
def read_fastx(
    filepath: Union[str, Path], fmt: str = "auto"
) -> Iterator[SeqIO.SeqRecord]:
    """
    Yield Bio.SeqRecord objects from a (possibly gzipped) fasta/fastq file.

    Parameters
    ----------
    filepath:
        path to the input fasta/fastq file
    fmt:
        'fasta', 'fastq' or 'auto' (default: "auto")

    See Also
    --------
    Bio.SeqIO.parse
    """
    path = Path(filepath) if isinstance(filepath, str) else filepath
    if fmt == "auto":
        fmt = guess_fmt(path)
    elif fmt not in ("fasta", "fastq"):
        raise ValueError("'fmt' must be 'fasta', 'fastq', or 'auto'")
    if not count_records(path, fmt):
        raise RuntimeError("No sequence records found in {}".format(path))
    # Same parsing path for plain and gzipped input; only the opener differs.
    opener = gzip.open(path, "rt") if path.suffix == ".gz" else path.open("r")
    with opener as handle:
        for record in SeqIO.parse(handle, fmt):
            yield record
def count_uniq_seq(filepath, read_count_in_id=False, **kwargs):
    """
    Count the number of reads for each unique (ungapped) sequence.

    Parameters
    ----------
    filepath :
        path to the input fasta/fastq file
    read_count_in_id:
        if True, read counts are parsed from each record id (the text
        after the last ':') and sample ids are collected as well
    kwargs:
        keyword arguments forwarded to read_fastx (e.g. fmt)

    Returns
    -------
    dict
        seq -> count (or seq -> [count, [sample ids]]), sorted in
        descending order of abundance
    """
    tally = {}
    for rec in read_fastx(filepath, **kwargs):
        seq = str(rec.seq).replace("-", "")
        if read_count_in_id:
            count = int(rec.id.split(":")[-1])
            idx = "_".join(rec.id.split("_")[:-1])
            entry = tally.setdefault(seq, [0, []])
            entry[0] += count
            entry[1].append(idx)
        else:
            tally[seq] = tally.get(seq, 0) + 1
    if read_count_in_id:
        order = lambda item: (len(item[1][1]), item[1][0])
    else:
        order = lambda item: item[1]
    return dict(sorted(tally.items(), key=order, reverse=True))
def gen_dict_from_table(filepath, key, value, header=True, delimiter=","):
    """
    Generate a dict object from a tabular file.

    Parameters
    ----------
    filepath : str or Path
        path to the input tabular file
    key : int or str
        column index or name for dict key
    value : int or str or list
        column index or name for dict value
    header : bool
        Whether the input file contains a header row (default: True)
    delimiter : str
        delimiter character of the input file (default: ",")

    Returns
    -------
    dict
        key column -> value column (or list of value columns)

    Raises
    ------
    TypeError
        if *key* is not an int with no header row, or *value* has a bad type
    ValueError
        if a named column is not found in the header
    """
    filepath = Path(filepath)
    check_file(filepath)
    # NOTE(review): with the default delimiter "," this suffix-based
    # sniffing is dead code; it only runs when an empty/falsy delimiter is
    # passed explicitly -- confirm whether that is intended.
    if not delimiter:
        if filepath.suffix == ".csv":
            delimiter = ","
        if filepath.suffix == ".tsv":
            # NOTE(review): str.split does not treat r"\t+" as a regex --
            # this splits on the literal three characters '\', 't', '+'.
            # Presumably a tab delimiter was intended; verify.
            delimiter = r"\t+"
        if filepath.suffix == ".txt":
            # None makes str.split split on any whitespace run.
            delimiter = None
    with filepath.open() as f:
        line = f.readline()
        if header:
            hd = line.strip().split(delimiter)
            line = f.readline()
        else:
            hd = []
        items_list = []
        # Rows are parsed cell-by-cell into int/float/str.  Note that
        # readline().strip() makes a blank line terminate the loop.
        while line:
            items_list.append([parse_item(x) for x in line.strip().split(delimiter)])
            line = f.readline().strip()
    if not hd and not isinstance(key, int):
        raise TypeError("key must be int when no header row in input file")
    else:
        # Resolve the key column: positional index or header name.
        if isinstance(key, int):
            idx0 = key
        else:
            try:
                idx0 = hd.index(key)
            except ValueError:
                raise ValueError("Column '{}' not found in {}".format(key, filepath))
    # Normalise *value* to a list of columns.
    if isinstance(value, str) or isinstance(value, int):
        value = [value]
    elif not isinstance(value, list):
        raise TypeError("value must be a str, int, or list")
    # An all-int list is treated as positional indices; anything else is
    # resolved against the header names.
    if np.array(value).dtype == "int64":
        idx1 = value
    else:
        try:
            idx1 = [hd.index(v) for v in value]
        except ValueError:
            not_found = ", ".join([v for v in value if v not in hd])
            raise ValueError("Column '{}' not found in {}".format(not_found, filepath))
    if len(idx1) > 1:
        return {items[idx0]: [items[x] for x in idx1] for items in items_list}
    else:
        return {items[idx0]: items[idx1[0]] for items in items_list}
def guess_fmt(filepath):
    """
    Guess the file format (FASTA/FASTQ) of the input sequence file.

    The filename is checked first; failing that, the content is probed
    with count_records.  Raises RuntimeError when neither works.
    """
    path = Path(filepath)
    if "fastq" in path.name:
        return "fastq"
    if "fasta" in path.name:
        return "fasta"
    for candidate in ("fastq", "fasta"):
        if count_records(path, candidate, "-m 10"):
            return candidate
    raise RuntimeError("Unable to determine file format")
def parse_item(s):
    """Convert a table cell to int or float when possible, else keep the str."""
    if s.isdigit():
        return int(s)
    if isfloat(s):
        return float(s)
    return s
def isfloat(value):
try:
float(value)
return True
except ValueError:
return False | massgenotyping/base.py | import gzip
import shutil
import subprocess
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Iterator, List, Union, Tuple
import numpy as np
from Bio import SeqIO
from dataclasses_json import config, dataclass_json
@dataclass_json
@dataclass(frozen=True)
class PrimerMatch:
    # A primer alignment hit: the matched substring, its start/end
    # coordinates and the edit distance of the match.
    # NOTE(review): whether start/end are 0-based half-open is not visible
    # here -- confirm against the producer of these objects.
    match: str = field(metadata=config(field_name="match"))
    start: int = field(metadata=config(field_name="start"))
    end: int = field(metadata=config(field_name="end"))
    dist: int = field(metadata=config(field_name="dist"))
@dataclass_json
@dataclass(frozen=True)
class RepData:
    # One tandem-repeat annotation: the repeat span, the number of repeat
    # units, the repeated sequence, and its motif / motif class.
    start: int = field(metadata=config(field_name="start"))
    end: int = field(metadata=config(field_name="end"))
    n_reps: int = field(metadata=config(field_name="n_reps"))
    rep_seq: str = field(metadata=config(field_name="rep_seq"))
    motif: str = field(metadata=config(field_name="motif"))
    motif_class: str = field(metadata=config(field_name="motif_class"))
@dataclass_json
@dataclass
class SeqData:
    """A unique sequence with its read counts and repeat annotations."""

    id: str = field(metadata=config(field_name="id"))
    seq: str = field(metadata=config(field_name="seq"))
    counts: int = field(metadata=config(field_name="counts"))
    # BUG FIX: this used to be field(default=int) -- the default was the
    # *type* ``int``, not an integer.  __post_init__ always overwrites it,
    # but 0 is the correct placeholder value.
    length: int = field(default=0)
    rep_data: List[RepData] = field(
        default_factory=list, metadata=config(field_name="rep_data")
    )
    samples: List[str] = field(
        default_factory=list, metadata=config(field_name="samples")
    )
    non_ssr_seq: str = field(default="", metadata=config(field_name="non_ssr_seq"))
    stutter_s: List[str] = field(
        default_factory=list, metadata=config(field_name="stutter_s")
    )
    stutter_l: List[str] = field(
        default_factory=list, metadata=config(field_name="stutter_l")
    )

    def __post_init__(self):
        # Normalise case and compute the ungapped sequence length.
        self.seq = self.seq.upper()
        self.length = len(self.seq.replace("-", ""))
@dataclass_json
@dataclass
class GenData:
    # Genotype call for one sample: the allele list and per-allele read counts.
    id: str = field(metadata=config(field_name="id"))
    n_reads_all: int = field(metadata=config(field_name="n_reads_all"))
    genotype: List[str] = field(
        default_factory=list, metadata=config(field_name="genotype")
    )
    n_reads_each: Dict[str, int] = field(
        default_factory=dict, metadata=config(field_name="n_reads_each")
    )
    def __post_init__(self):
        # Sanity check: every called allele (ignoring "NA") should have a read
        # count and vice versa.  Deliberately non-fatal -- a mismatch is only
        # printed, not raised.
        try:
            err = "Allele sets differed between genotype and n_reads_each"
            alleles = [x for x in self.genotype if x != "NA"]
            assert set(alleles) == set(self.n_reads_each.keys()), err
        except AssertionError:
            print(set(alleles), set(self.n_reads_each))
class MarkerData(object):
    """Primer/marker metadata loaded from a marker table or primer FASTAs."""

    def __init__(
        self,
        path_marker_data=None,
        path_fwd_primers=None,
        path_rev_primers=None,
        verbose=False,
        **kwargs,
    ):
        # BUG FIX: path_fwd_primers used to be passed twice (also in the
        # reverse-primer slot), so reverse primers were never loaded when
        # constructing from separate primer files.
        self.dict_primer = self.read_marker_file(
            path_marker_data, path_fwd_primers, path_rev_primers, verbose
        )
        self.path_marker_data = path_marker_data
        if self.path_marker_data:
            self.set_frag_len()
            self.set_max_alleles()

    def set_frag_len(self):
        """Load the Name -> Frag_len mapping; None when the column is absent."""
        try:
            self.dict_frag_len = gen_dict_from_table(
                self.path_marker_data, "Name", "Frag_len"
            )
        except ValueError:
            self.dict_frag_len = None

    def set_max_alleles(self):
        """Load the Name -> Max_alleles mapping; None when the column is absent."""
        try:
            self.dict_max_alleles = gen_dict_from_table(
                self.path_marker_data, "Name", "Max_alleles"
            )
        except ValueError:
            self.dict_max_alleles = None

    def read_marker_file(
        self,
        path_marker_data=None,
        path_fwd_primers=None,
        path_rev_primers=None,
        verbose=False,
    ):
        """Return a dict mapping marker name -> [fwd_seq, rev_seq].

        Sources, in order of precedence: a marker table with Name/Fwd/Rev
        columns, or a pair of forward/reverse primer FASTA files.

        Raises
        ------
        ValueError
            when neither source is fully specified
        RuntimeError
            when the two FASTA files differ in record count
        """
        if path_marker_data:
            dict_primer = gen_dict_from_table(
                path_marker_data, key="Name", value=["Fwd", "Rev"]
            )
        elif path_fwd_primers and path_rev_primers:
            len_f = count_records(path_fwd_primers)
            len_r = count_records(path_rev_primers)
            if len_f != len_r:
                msg = "The number of sequences differs between "
                msg += "{} and {}".format(path_fwd_primers, path_rev_primers)
                raise RuntimeError(msg)
            dict_primer = {}
            msg = "\nThe names of foward and rev primers do not match. "
            msg += "The foward primer names are used as marker names:"
            for f, r in zip(read_fastx(path_fwd_primers), read_fastx(path_rev_primers)):
                if f.id != r.id and verbose:
                    # Print the banner only once, before the first mismatch.
                    if msg:
                        print(msg)
                        msg = ""
                    print("Fwd: {0}, Rev: {1} -----> {0}".format(f.id, r.id))
                dict_primer[f.id] = [str(f.seq), str(r.seq)]
        elif path_fwd_primers:
            # BUG FIX: the two messages below were swapped -- when only the
            # forward file is given it is the *reverse* file that is missing.
            msg = "'path_rev_primers' not provided"
            raise ValueError(msg)
        elif path_rev_primers:
            msg = "'path_fwd_primers' not provided"
            raise ValueError(msg)
        else:
            msg = "No information for primer sequences provided"
            raise ValueError(msg)
        return dict_primer
def revc(s: str) -> str:
    """Return the reverse complement of the given nucleotide sequence.

    Accepts IUPAC ambiguity codes, 'Z', gaps ('-') and both cases.

    Raises
    ------
    RuntimeError
        if *s* contains any character outside the supported alphabet
    """
    o = "ACGTUWSMKRYBDHVNZacgtuwsmkrybdhvnz-"
    c = "TGCAAWSKMYRVHDBNZtgcaawskmyrvhdbnz-"
    # BUG FIX: the old guard was `len(set(s) & set(o)) > len(set(s))`, which
    # is impossible (an intersection is never larger than either operand),
    # so invalid characters were silently passed through untranslated.
    if set(s) - set(o):
        errmsg = "invalid character was found in the sequeces"
        raise RuntimeError(errmsg)
    return s.translate(str.maketrans(o, c))[::-1]
def check_file(filepath: Union[str, Path]):
    """Raise unless *filepath* names an existing regular file."""
    path = Path(filepath) if isinstance(filepath, str) else filepath
    if not path.exists():
        raise FileNotFoundError("File not found: {}".format(path))
    if not path.is_file():
        raise RuntimeError("'{}' is not a file".format(path))
def check_no_wrapped(filepath: Union[str, Path], fmt: str = "fastq"):
    """
    Check the input sequence file and raise an error if it is wrapped
    (i.e. a record's sequence spans multiple lines).

    Parameters
    ----------
    filepath :
        path to the sequence file
    fmt :
        file format ('fasta' or 'fastq')

    Raises
    ------
    AssertionError
        if the header lines are not evenly spaced (wrapped records)
    ValueError
        if *fmt* is neither 'fasta' nor 'fastq'
    """
    if isinstance(filepath, str):
        filepath = Path(filepath)
    check_file(filepath)
    # Prefer ripgrep when available; prog0 reads the (possibly gzipped)
    # file, prog1 filters the already-decompressed pipe.
    if filepath.suffix == ".gz":
        if shutil.which("rg"):
            prog0 = "rg -z"
            prog1 = "rg"
        else:
            prog0 = "zgrep"
            prog1 = "grep"
    else:
        if shutil.which("rg"):
            prog0 = prog1 = "rg"
        else:
            prog0 = prog1 = "grep"
    if fmt == "fasta":
        # Line numbers of the first 20 '>' headers; in an unwrapped FASTA
        # they sit at lines 1, 3, 5, ... (offset j=1, step k=2).
        cmd0 = "{} -n -m 20 ^> {}".format(prog0, filepath).split()
        cmd1 = r"cut -d: -f1".split()
        res = subprocess.Popen(cmd0, stdout=subprocess.PIPE)
        res = subprocess.Popen(cmd1, stdin=res.stdout, stdout=subprocess.PIPE)
        j, k = 1, 2
    elif fmt == "fastq":
        # Keep each '@' header plus two following lines, then locate the
        # '+' separators; including the '--' group separators emitted by
        # grep/rg they land at 3, 7, 11, ... (j=3, k=4) when unwrapped.
        cmd0 = r"{} -A2 ^@ {}".format(prog0, filepath).split()
        cmd1 = r"{} -n -m 20 ^\+".format(prog1).split()
        cmd2 = r"cut -d: -f1".split()
        res = subprocess.Popen(cmd0, stdout=subprocess.PIPE)
        res = subprocess.Popen(cmd1, stdin=res.stdout, stdout=subprocess.PIPE)
        res = subprocess.Popen(cmd2, stdin=res.stdout, stdout=subprocess.PIPE)
        j, k = 3, 4
    else:
        # BUG FIX: an unknown fmt used to fall through and crash with
        # UnboundLocalError on `res`; fail with a clear message instead.
        raise ValueError("fmt must be either 'fastq' or 'fasta'")
    line_nos = res.stdout.read().decode("utf-8").strip().split()
    if line_nos:
        line_nos = np.array(line_nos).astype(int)
        n = len(line_nos)
        # BUG FIX: this used np.any, which passes as soon as a single
        # header is in the expected place; every header must be in place
        # for the file to count as unwrapped.
        assert np.all((line_nos - j) / k == np.arange(n))
def count_records(
    filepath: Union[str, Path], fmt: str = "fastq", opts: str = ""
) -> int:
    """
    Count the number of sequence records in a fasta/fastq file.

    Parameters
    ----------
    filepath:
        path to the input fasta/fastq file
    fmt:
        file format, 'fasta' or 'fastq' (default: "fastq")
    opts:
        extra options passed to grep/rg
    """
    path = Path(filepath) if isinstance(filepath, str) else filepath
    check_file(path)
    # Pick the search tools: prog0 reads the (possibly gzipped) file,
    # prog1 filters the decompressed stream.
    have_rg = bool(shutil.which("rg"))
    if path.suffix == ".gz":
        prog0 = "rg -z" if have_rg else "zgrep"
        prog1 = "rg" if have_rg else "grep"
    else:
        prog0 = prog1 = "rg" if have_rg else "grep"
    if fmt == "fasta":
        # One '>' header per record.
        find_headers = "{} -c ^> {} {}".format(prog0, path, opts).split()
        proc = subprocess.Popen(find_headers, stdout=subprocess.PIPE)
        return int(proc.stdout.read().decode("utf-8").strip())
    elif fmt == "fastq":
        # '@' alone is ambiguous (quality strings may start with '@'), so
        # keep each header plus the next two lines and count the '+'
        # separator lines instead.
        keep_heads = r"{} -A2 ^@ {} {}".format(prog0, path, opts).split()
        count_plus = r"{} -c ^\+".format(prog1).split()
        p0 = subprocess.Popen(keep_heads, stdout=subprocess.PIPE)
        p1 = subprocess.Popen(count_plus, stdin=p0.stdout, stdout=subprocess.PIPE)
        out = p1.stdout.read().decode("utf-8").strip()
        return int(out) if out else 0
    else:
        raise ValueError("fmt must be either 'fastq' or 'fasta'")
def read_fastx(
    filepath: Union[str, Path], fmt: str = "auto"
) -> Iterator[SeqIO.SeqRecord]:
    """
    Yield Bio.SeqRecord objects from a (possibly gzipped) fasta/fastq file.

    Parameters
    ----------
    filepath:
        path to the input fasta/fastq file
    fmt:
        'fasta', 'fastq' or 'auto' (default: "auto")

    See Also
    --------
    Bio.SeqIO.parse
    """
    path = Path(filepath) if isinstance(filepath, str) else filepath
    if fmt == "auto":
        fmt = guess_fmt(path)
    elif fmt not in ("fasta", "fastq"):
        raise ValueError("'fmt' must be 'fasta', 'fastq', or 'auto'")
    if not count_records(path, fmt):
        raise RuntimeError("No sequence records found in {}".format(path))
    # Same parsing path for plain and gzipped input; only the opener differs.
    opener = gzip.open(path, "rt") if path.suffix == ".gz" else path.open("r")
    with opener as handle:
        for record in SeqIO.parse(handle, fmt):
            yield record
def count_uniq_seq(filepath, read_count_in_id=False, **kwargs):
    """
    Count the number of reads for each unique (ungapped) sequence.

    Parameters
    ----------
    filepath :
        path to the input fasta/fastq file
    read_count_in_id:
        if True, read counts are parsed from each record id (the text
        after the last ':') and sample ids are collected as well
    kwargs:
        keyword arguments forwarded to read_fastx (e.g. fmt)

    Returns
    -------
    dict
        seq -> count (or seq -> [count, [sample ids]]), sorted in
        descending order of abundance
    """
    tally = {}
    for rec in read_fastx(filepath, **kwargs):
        seq = str(rec.seq).replace("-", "")
        if read_count_in_id:
            count = int(rec.id.split(":")[-1])
            idx = "_".join(rec.id.split("_")[:-1])
            entry = tally.setdefault(seq, [0, []])
            entry[0] += count
            entry[1].append(idx)
        else:
            tally[seq] = tally.get(seq, 0) + 1
    if read_count_in_id:
        order = lambda item: (len(item[1][1]), item[1][0])
    else:
        order = lambda item: item[1]
    return dict(sorted(tally.items(), key=order, reverse=True))
def gen_dict_from_table(filepath, key, value, header=True, delimiter=","):
    """
    Generate a dict object from a tabular file.

    Parameters
    ----------
    filepath : str or Path
        path to the input tabular file
    key : int or str
        column index or name for dict key
    value : int or str or list
        column index or name for dict value
    header : bool
        Whether the input file contains a header row (default: True)
    delimiter : str
        delimiter character of the input file (default: ",")

    Returns
    -------
    dict
        key column -> value column (or list of value columns)

    Raises
    ------
    TypeError
        if *key* is not an int with no header row, or *value* has a bad type
    ValueError
        if a named column is not found in the header
    """
    filepath = Path(filepath)
    check_file(filepath)
    # NOTE(review): with the default delimiter "," this suffix-based
    # sniffing is dead code; it only runs when an empty/falsy delimiter is
    # passed explicitly -- confirm whether that is intended.
    if not delimiter:
        if filepath.suffix == ".csv":
            delimiter = ","
        if filepath.suffix == ".tsv":
            # NOTE(review): str.split does not treat r"\t+" as a regex --
            # this splits on the literal three characters '\', 't', '+'.
            # Presumably a tab delimiter was intended; verify.
            delimiter = r"\t+"
        if filepath.suffix == ".txt":
            # None makes str.split split on any whitespace run.
            delimiter = None
    with filepath.open() as f:
        line = f.readline()
        if header:
            hd = line.strip().split(delimiter)
            line = f.readline()
        else:
            hd = []
        items_list = []
        # Rows are parsed cell-by-cell into int/float/str.  Note that
        # readline().strip() makes a blank line terminate the loop.
        while line:
            items_list.append([parse_item(x) for x in line.strip().split(delimiter)])
            line = f.readline().strip()
    if not hd and not isinstance(key, int):
        raise TypeError("key must be int when no header row in input file")
    else:
        # Resolve the key column: positional index or header name.
        if isinstance(key, int):
            idx0 = key
        else:
            try:
                idx0 = hd.index(key)
            except ValueError:
                raise ValueError("Column '{}' not found in {}".format(key, filepath))
    # Normalise *value* to a list of columns.
    if isinstance(value, str) or isinstance(value, int):
        value = [value]
    elif not isinstance(value, list):
        raise TypeError("value must be a str, int, or list")
    # An all-int list is treated as positional indices; anything else is
    # resolved against the header names.
    if np.array(value).dtype == "int64":
        idx1 = value
    else:
        try:
            idx1 = [hd.index(v) for v in value]
        except ValueError:
            not_found = ", ".join([v for v in value if v not in hd])
            raise ValueError("Column '{}' not found in {}".format(not_found, filepath))
    if len(idx1) > 1:
        return {items[idx0]: [items[x] for x in idx1] for items in items_list}
    else:
        return {items[idx0]: items[idx1[0]] for items in items_list}
def guess_fmt(filepath):
    """
    Guess the file format (FASTA/FASTQ) of the input sequence file.

    The filename is checked first; failing that, the content is probed
    with count_records.  Raises RuntimeError when neither works.
    """
    path = Path(filepath)
    if "fastq" in path.name:
        return "fastq"
    if "fasta" in path.name:
        return "fasta"
    for candidate in ("fastq", "fasta"):
        if count_records(path, candidate, "-m 10"):
            return candidate
    raise RuntimeError("Unable to determine file format")
def parse_item(s):
    """Convert a table cell to int or float when possible, else keep the str."""
    if s.isdigit():
        return int(s)
    if isfloat(s):
        return float(s)
    return s
def isfloat(value):
try:
float(value)
return True
except ValueError:
return False | 0.66454 | 0.140956 |
""" This script pulls in the last traded price of BTC from Bitfinex and
Kraken and sends an e-mail alert if a trading signal (entry/ exit) is
received.
"""
import time, json, requests, smtplib
FROMID = '' # A string of the form <EMAIL>,
             # Use gmx.com and not gmail, as gmail will block the attempt
             # due to security concerns.
PASSWORD = '' # password associated with FROMID
TOID = '' # A string of the form <EMAIL>
# Helper functions
def bitfinex():
    """Return the last traded BTC/USD price from the Bitfinex public API.

    API docs: https://www.bitfinex.com/pages/api
    """
    response = requests.get("https://api.bitfinex.com/v1/ticker/btcusd")
    return response.json()['last_price']
def kraken():
    """Return the last traded BTC/USD price from the Kraken public API.

    API docs: https://www.kraken.com/help/api
    """
    response = requests.post(
        'https://api.kraken.com/0/public/Ticker',
        data=json.dumps({"pair": "XXBTZUSD"}),
        headers={"content-type": "application/json"},
    )
    # 'c' is the last-trade entry; element 0 is the price.
    return response.json()['result']['XXBTZUSD']['c'][0]
def send_mail(fromid, toid, password, msg):
    """Send *msg* from *fromid* to *toid* via GMX SMTP over STARTTLS.

    BUG FIX: the parameters were previously ignored in favour of the
    module-level FROMID/TOID/PASSWORD globals, and the SMTP connection was
    never closed.  Existing call sites pass the globals anyway, so their
    behaviour is unchanged.
    """
    server = smtplib.SMTP("smtp.gmx.com", 587)
    try:
        server.starttls()
        server.login(fromid, password)
        server.sendmail(fromid, [toid], msg)  # recipient must be a list
    finally:
        server.quit()
# Spread-watch loop: once an hour, compare the Bitfinex and Kraken BTC/USD
# prices.  A spread wider than 1% of the lower price opens a (paper)
# position and e-mails an alert; the position is closed when the spread
# reverses past the opposite threshold.
# NOTE(review): "Bitifinex" is a typo in the outgoing alert text, left
# as-is here because it is runtime output.  There is also no error
# handling around the network calls -- a single failed request kills the
# loop; consider wrapping in try/except.
pos = None  # current open position: None, 'Long' or 'Short'
while True:
    krakenUSDLive = float(kraken())
    bitfinexUSDLive = float(bitfinex())
    diff = bitfinexUSDLive - krakenUSDLive  # positive: Bitfinex richer
    if pos is None:
        # Entry: Bitfinex >1% cheaper -> long Bitfinex / short Kraken.
        if diff < -0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = 'Long'
            msg = '\n' + 'Potential BTC long spread: Bitifinex and Kraken.'
            msg = msg + '\n' + 'Long Bitifinex and short Kraken.' + "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
        # Entry: Bitfinex >1% richer -> short Bitfinex / long Kraken.
        if diff > 0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = 'Short'
            msg = '\n' + 'Potential BTC short spread: Bitifinex and Kraken.'
            msg = msg + '\n' + 'Short Bitifinex and long Kraken.' + "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
    if pos is not None:
        # Exit: the spread has fully reversed through the opposite band.
        if pos == 'Long' and diff >= 0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = None
            msg = '\n' + 'Close BTC long spread: Bitifinex and Kraken.'
            msg = msg + '\n' + 'Close the Bitfinex long and Kraken short.'+ "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
        if pos == 'Short' and diff <= -0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = None
            msg = '\n' + 'Close BTC short spread: Bitifinex and Kraken'
            msg = msg + '\n' + 'Close the Bitfinex short and Kraken long.' + "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
time.sleep(3600) # 3600 equals one hour. The API's are called every hour | crypto_aribtrage/mail_alert.py | """ This script pulls in the last traded price of BTC from Bitfinex and
Kraken and sends an e-mail alert if a trading signal (entry/ exit) is
received.
"""
import time, json, requests, smtplib
FROMID = '' # A string of the form <EMAIL>,
             # Use gmx.com and not gmail, as gmail will block the attempt
             # due to security concerns.
PASSWORD = '' # password associated with FROMID
TOID = '' # A string of the form <EMAIL>
# Helper functions
def bitfinex():
    """Return the last traded BTC/USD price from the Bitfinex public API.

    API docs: https://www.bitfinex.com/pages/api
    """
    response = requests.get("https://api.bitfinex.com/v1/ticker/btcusd")
    return response.json()['last_price']
def kraken():
    """Return the last traded BTC/USD price from the Kraken public API.

    API docs: https://www.kraken.com/help/api
    """
    response = requests.post(
        'https://api.kraken.com/0/public/Ticker',
        data=json.dumps({"pair": "XXBTZUSD"}),
        headers={"content-type": "application/json"},
    )
    # 'c' is the last-trade entry; element 0 is the price.
    return response.json()['result']['XXBTZUSD']['c'][0]
def send_mail(fromid, toid, password, msg):
    """Send *msg* from *fromid* to *toid* via GMX SMTP over STARTTLS.

    BUG FIX: the parameters were previously ignored in favour of the
    module-level FROMID/TOID/PASSWORD globals, and the SMTP connection was
    never closed.  Existing call sites pass the globals anyway, so their
    behaviour is unchanged.
    """
    server = smtplib.SMTP("smtp.gmx.com", 587)
    try:
        server.starttls()
        server.login(fromid, password)
        server.sendmail(fromid, [toid], msg)  # recipient must be a list
    finally:
        server.quit()
# Spread-watch loop: once an hour, compare the Bitfinex and Kraken BTC/USD
# prices.  A spread wider than 1% of the lower price opens a (paper)
# position and e-mails an alert; the position is closed when the spread
# reverses past the opposite threshold.
# NOTE(review): "Bitifinex" is a typo in the outgoing alert text, left
# as-is here because it is runtime output.  There is also no error
# handling around the network calls -- a single failed request kills the
# loop; consider wrapping in try/except.
pos = None  # current open position: None, 'Long' or 'Short'
while True:
    krakenUSDLive = float(kraken())
    bitfinexUSDLive = float(bitfinex())
    diff = bitfinexUSDLive - krakenUSDLive  # positive: Bitfinex richer
    if pos is None:
        # Entry: Bitfinex >1% cheaper -> long Bitfinex / short Kraken.
        if diff < -0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = 'Long'
            msg = '\n' + 'Potential BTC long spread: Bitifinex and Kraken.'
            msg = msg + '\n' + 'Long Bitifinex and short Kraken.' + "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
        # Entry: Bitfinex >1% richer -> short Bitfinex / long Kraken.
        if diff > 0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = 'Short'
            msg = '\n' + 'Potential BTC short spread: Bitifinex and Kraken.'
            msg = msg + '\n' + 'Short Bitifinex and long Kraken.' + "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
    if pos is not None:
        # Exit: the spread has fully reversed through the opposite band.
        if pos == 'Long' and diff >= 0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = None
            msg = '\n' + 'Close BTC long spread: Bitifinex and Kraken.'
            msg = msg + '\n' + 'Close the Bitfinex long and Kraken short.'+ "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
        if pos == 'Short' and diff <= -0.01*min(bitfinexUSDLive, krakenUSDLive):
            pos = None
            msg = '\n' + 'Close BTC short spread: Bitifinex and Kraken'
            msg = msg + '\n' + 'Close the Bitfinex short and Kraken long.' + "\n"
            send_mail(fromid=FROMID, toid=TOID, password=PASSWORD,
                      msg=msg)
time.sleep(3600) # 3600 equals one hour. The API's are called every hour | 0.448909 | 0.332121 |
from functools import wraps
import requests
import json
def CheckVPN(*args, **kwargs):
    # Placeholder -- not implemented yet; always raises.
    raise Exception("Function Not Finished")
def RequireVPN(*Dargs, **Dkwargs):
    """Decorator factory: run the wrapped function only when the caller's
    current public-IP metadata matches every given requirement.

    Supported keyword requirements and the ip-api.com response fields they
    are checked against:
        IP->query, Mobile->mobile, Proxy->proxy, Hosting->hosting,
        Reverse->reverse, ASName->asname, AS->as, ISP->isp,
        UTCOffset->offset, Continent->continent, Country->country,
        City->city, ZIP->zip

    Raises Exception("Requirements Not Passed") when any requirement does
    not match the live lookup.
    """
    # Requirement keyword -> field name in the ip-api.com JSON payload.
    field_map = {
        "IP": "query",
        "Mobile": "mobile",
        "Proxy": "proxy",
        "Hosting": "hosting",
        "Reverse": "reverse",
        "ASName": "asname",
        "AS": "as",
        "ISP": "isp",
        "UTCOffset": "offset",
        "Continent": "continent",
        "Country": "country",
        "City": "city",
        "ZIP": "zip",
    }

    def decorator(function):
        @wraps(function)
        def wrapper(*Fargs, **Fkwargs):
            # BUG FIX: IPAPI was referenced but never defined anywhere, so
            # every call died with NameError.  Honour a module-level IPAPI
            # override if one exists; otherwise use the ip-api.com endpoint
            # with all required fields requested.
            api_url = globals().get(
                "IPAPI",
                "http://ip-api.com/json/?fields=status,continent,country,"
                "city,zip,offset,isp,as,asname,reverse,mobile,proxy,"
                "hosting,query",
            )
            r = requests.get(api_url).json()
            # Every recognised requirement must match, and no requirement
            # may be unrecognised (unknown names could never pass before
            # either, since they were not counted as satisfied).
            matched = all(
                r[field_map[name]] == wanted
                for name, wanted in Dkwargs.items()
                if name in field_map
            )
            if matched and all(name in field_map for name in Dkwargs):
                return function(*Fargs, **Fkwargs)
            else:
                raise Exception("Requirements Not Passed")
        return wrapper
    return decorator
import requests
import json
def CheckVPN(*args, **kwargs):
    # Placeholder -- not implemented yet; always raises.
    raise Exception("Function Not Finished")
def RequireVPN(*Dargs, **Dkwargs):
    """Decorator factory: run the wrapped function only when the caller's
    current public-IP metadata matches every given requirement.

    Supported keyword requirements and the ip-api.com response fields they
    are checked against:
        IP->query, Mobile->mobile, Proxy->proxy, Hosting->hosting,
        Reverse->reverse, ASName->asname, AS->as, ISP->isp,
        UTCOffset->offset, Continent->continent, Country->country,
        City->city, ZIP->zip

    Raises Exception("Requirements Not Passed") when any requirement does
    not match the live lookup.
    """
    # Requirement keyword -> field name in the ip-api.com JSON payload.
    field_map = {
        "IP": "query",
        "Mobile": "mobile",
        "Proxy": "proxy",
        "Hosting": "hosting",
        "Reverse": "reverse",
        "ASName": "asname",
        "AS": "as",
        "ISP": "isp",
        "UTCOffset": "offset",
        "Continent": "continent",
        "Country": "country",
        "City": "city",
        "ZIP": "zip",
    }

    def decorator(function):
        @wraps(function)
        def wrapper(*Fargs, **Fkwargs):
            # BUG FIX: IPAPI was referenced but never defined anywhere, so
            # every call died with NameError.  Honour a module-level IPAPI
            # override if one exists; otherwise use the ip-api.com endpoint
            # with all required fields requested.
            api_url = globals().get(
                "IPAPI",
                "http://ip-api.com/json/?fields=status,continent,country,"
                "city,zip,offset,isp,as,asname,reverse,mobile,proxy,"
                "hosting,query",
            )
            r = requests.get(api_url).json()
            # Every recognised requirement must match, and no requirement
            # may be unrecognised (unknown names could never pass before
            # either, since they were not counted as satisfied).
            matched = all(
                r[field_map[name]] == wanted
                for name, wanted in Dkwargs.items()
                if name in field_map
            )
            if matched and all(name in field_map for name in Dkwargs):
                return function(*Fargs, **Fkwargs)
            else:
                raise Exception("Requirements Not Passed")
        return wrapper
    return decorator
from typing import List
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from src.app.auth.logic import get_current_user, get_current_active_superuser
from src.app.base.utils.db import get_db
from src.app.user.models import User
from src.app.blog import schemas
from src.app.blog import service
blog_router = APIRouter()
@blog_router.post("/category", response_model=schemas.CategoryInDB)
def create_category(
    item: schemas.CategoryCreate,
    db: Session = Depends(get_db),
    current: User = Depends(get_current_active_superuser)
):
    """Create category"""
    # `current` is unused in the body but required: the dependency rejects
    # non-superuser callers before the handler runs.
    return service.category.create(db_session=db, obj_in=item)
@blog_router.get("/category", response_model=List[schemas.CategoryInDB])
def get_list_category(db: Session = Depends(get_db)):
    """Get list category"""
    return service.category.get_multi(db_session=db)
@blog_router.get("/category/{pk}", response_model=schemas.CategoryInDB)
def get_category(pk: int, db: Session = Depends(get_db)):
    """Get single category"""
    query = service.category.get(db_session=db, id=pk)
    if not query:
        # Missing (falsy) result -> 404.
        raise HTTPException(status_code=404, detail="Not found")
    return query
@blog_router.post("/tag", response_model=schemas.Tag)
def create_tag(
    item: schemas.TagCreateUpdate,
    db: Session = Depends(get_db),
    current: User = Depends(get_current_active_superuser)
):
    """Create tag"""
    # Superuser-only, enforced by the `current` dependency.
    return service.tag.create(db_session=db, obj_in=item)
@blog_router.get("/tag", response_model=List[schemas.Tag])
def get_list_tag(db: Session = Depends(get_db)):
    """Get list tag"""
    return service.tag.get_multi(db_session=db)
@blog_router.get("/tag/{pk}", response_model=schemas.Tag)
def get_tag(pk: int, db: Session = Depends(get_db)):
    """Get single tag"""
    query = service.tag.get(db_session=db, id=pk)
    if not query:
        raise HTTPException(status_code=404, detail="Not found")
    return query
@blog_router.post("/post", response_model=schemas.PostCreateUpdateInDB)
def create_post(
    item: schemas.PostCreateUpdate,
    db: Session = Depends(get_db),
    user: User = Depends(get_current_active_superuser)
):
    """Create post"""
    # Unlike the other create endpoints, the author is recorded on the post.
    return service.post.create(db_session=db, obj_in=item, user=user)
@blog_router.get("/post", response_model=List[schemas.Post])
def get_list_post(db: Session = Depends(get_db)):
    """Get list post"""
    return service.post.get_multi(db_session=db)
@blog_router.get("/post/{pk}", response_model=schemas.Post)
def get_post(pk: int, db: Session = Depends(get_db)):
    """Return a single post by its primary key, or 404 when absent."""
    post = service.post.get(db_session=db, id=pk)
    if not post:
        raise HTTPException(status_code=404, detail="Not found")
    return post
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.orm import Session
from src.app.auth.logic import get_current_user, get_current_active_superuser
from src.app.base.utils.db import get_db
from src.app.user.models import User
from src.app.blog import schemas
from src.app.blog import service
blog_router = APIRouter()
@blog_router.post("/category", response_model=schemas.CategoryInDB)
def create_category(
    item: schemas.CategoryCreate,
    db: Session = Depends(get_db),
    current: User = Depends(get_current_active_superuser)
):
    """Create category"""
    # `current` is unused in the body but required: the dependency rejects
    # non-superuser callers before the handler runs.
    return service.category.create(db_session=db, obj_in=item)
@blog_router.get("/category", response_model=List[schemas.CategoryInDB])
def get_list_category(db: Session = Depends(get_db)):
    """Get list category"""
    return service.category.get_multi(db_session=db)
@blog_router.get("/category/{pk}", response_model=schemas.CategoryInDB)
def get_category(pk: int, db: Session = Depends(get_db)):
    """Get single category"""
    query = service.category.get(db_session=db, id=pk)
    if not query:
        # Missing (falsy) result -> 404.
        raise HTTPException(status_code=404, detail="Not found")
    return query
@blog_router.post("/tag", response_model=schemas.Tag)
def create_tag(
    item: schemas.TagCreateUpdate,
    db: Session = Depends(get_db),
    current: User = Depends(get_current_active_superuser)
):
    """Create tag"""
    # Superuser-only, enforced by the `current` dependency.
    return service.tag.create(db_session=db, obj_in=item)
@blog_router.get("/tag", response_model=List[schemas.Tag])
def get_list_tag(db: Session = Depends(get_db)):
    """Get list tag"""
    return service.tag.get_multi(db_session=db)
@blog_router.get("/tag/{pk}", response_model=schemas.Tag)
def get_tag(pk: int, db: Session = Depends(get_db)):
    """Get single tag"""
    query = service.tag.get(db_session=db, id=pk)
    if not query:
        raise HTTPException(status_code=404, detail="Not found")
    return query
@blog_router.post("/post", response_model=schemas.PostCreateUpdateInDB)
def create_post(
    item: schemas.PostCreateUpdate,
    db: Session = Depends(get_db),
    user: User = Depends(get_current_active_superuser)
):
    """Create post"""
    # Unlike the other create endpoints, the author is recorded on the post.
    return service.post.create(db_session=db, obj_in=item, user=user)
@blog_router.get("/post", response_model=List[schemas.Post])
def get_list_post(db: Session = Depends(get_db)):
    """Get list post"""
    return service.post.get_multi(db_session=db)
@blog_router.get("/post/{pk}", response_model=schemas.Post)
def get_post(pk: int, db: Session = Depends(get_db)):
"""Get single post"""
query = service.post.get(db_session=db, id=pk)
if not query:
raise HTTPException(status_code=404, detail="Not found")
return query | 0.633297 | 0.113481 |
from azureml.core import ComputeTarget, ScriptRunConfig, Experiment, Environment
from azureml.core import Dataset
from azureml.core.conda_dependencies import CondaDependencies
from azureml.core import ComputeTarget
from azureml.train.estimator import Estimator
def main(workspace):
# Load compute target
print("Loading compute target")
compute_target = ComputeTarget(
workspace=workspace,
name="githubcluster"
)
dataset_ds = Dataset.get_by_name(workspace=workspace, name='wine_dataset', version='latest')
# Load script parameters
print("Loading script parameters")
script_params = {
"--kernel": "linear",
"--penalty": 1.0,
"--ds": dataset_ds.as_named_input('dataset')
}
# Create experiment config
print("Creating experiment config")
estimator = Estimator(
source_directory="code/train",
entry_script="train.py",
script_params=script_params,
compute_target=compute_target,
pip_packages=["azureml-dataprep[pandas,fuse]", "scikit-learn", "pandas", "matplotlib"]
)
return estimator
#-------------------------
'''
def main(workspace):
# Load compute target
print("Loading compute target")
compute_target = ComputeTarget(workspace=workspace,name="githubcluster")
env = Environment("aml-mlops-template-env")
packages = CondaDependencies.create(conda_packages=['scikit-learn', 'pandas', 'matplotlib'],
pip_packages=['azureml-defaults'])
env.python.conda_dependencies = packages
compute_name='githubcluster'
dataset_ds = Dataset.get_by_name(workspace=workspace, name='wine_dataset', version='latest')
# Load script parameters which have been optimized during DS-experiment stage
print("Loading script parameters")
script_params = {
"--kernel": "linear",
"--penalty": 1.0,
"--ds": dataset_ds
}
# Create a script config
script_config = ScriptRunConfig(source_directory='code/train',
script='train.py',
arguments = ['--kernel', 'linear', '--penalty', 0.1, '--ds', dataset_ds.as_named_input('dataset')],
environment=env,
compute_target=compute_name
)
# Submit the experiment
experiment = Experiment(workspace=workspace, name='aml_mlops_template')
run = experiment.submit(config=script_config)
return run
'''
#------------------ | code/train/run_config.py | from azureml.core import ComputeTarget, ScriptRunConfig, Experiment, Environment
from azureml.core import Dataset
from azureml.core.conda_dependencies import CondaDependencies
from azureml.core import ComputeTarget
from azureml.train.estimator import Estimator
def main(workspace):
# Load compute target
print("Loading compute target")
compute_target = ComputeTarget(
workspace=workspace,
name="githubcluster"
)
dataset_ds = Dataset.get_by_name(workspace=workspace, name='wine_dataset', version='latest')
# Load script parameters
print("Loading script parameters")
script_params = {
"--kernel": "linear",
"--penalty": 1.0,
"--ds": dataset_ds.as_named_input('dataset')
}
# Create experiment config
print("Creating experiment config")
estimator = Estimator(
source_directory="code/train",
entry_script="train.py",
script_params=script_params,
compute_target=compute_target,
pip_packages=["azureml-dataprep[pandas,fuse]", "scikit-learn", "pandas", "matplotlib"]
)
return estimator
#-------------------------
'''
def main(workspace):
# Load compute target
print("Loading compute target")
compute_target = ComputeTarget(workspace=workspace,name="githubcluster")
env = Environment("aml-mlops-template-env")
packages = CondaDependencies.create(conda_packages=['scikit-learn', 'pandas', 'matplotlib'],
pip_packages=['azureml-defaults'])
env.python.conda_dependencies = packages
compute_name='githubcluster'
dataset_ds = Dataset.get_by_name(workspace=workspace, name='wine_dataset', version='latest')
# Load script parameters which have been optimized during DS-experiment stage
print("Loading script parameters")
script_params = {
"--kernel": "linear",
"--penalty": 1.0,
"--ds": dataset_ds
}
# Create a script config
script_config = ScriptRunConfig(source_directory='code/train',
script='train.py',
arguments = ['--kernel', 'linear', '--penalty', 0.1, '--ds', dataset_ds.as_named_input('dataset')],
environment=env,
compute_target=compute_name
)
# Submit the experiment
experiment = Experiment(workspace=workspace, name='aml_mlops_template')
run = experiment.submit(config=script_config)
return run
'''
#------------------ | 0.759047 | 0.414425 |
from datasets import load_dataset
import numpy as np
def dummy_msg(dset:str): return f"Using dummy dataset for {dset}"
class Dataloader:
'''A class that allows us to get batches of data from huggingface datsets'''
def __init__(self, dataset:str,batch_size:int,
transforms:list=list(),
train:bool=True,
shuffle:bool=False,
dummy:bool=False):
self.dataset = dataset.upper()
self.batch_size = batch_size
self.train = train
assert isinstance(self.dataset, str) and isinstance(self.batch_size, int)
if self.dataset == 'MNIST':
# Data source
# https://huggingface.co/datasets/mnist
if train:
#shape is image:(60000,28,28) label: (60000)
if not dummy:
self.dset = load_dataset('mnist', split='train')
else:
print(dummy_msg(self.dataset.lower().strip()))
image = np.clip(np.random.randn(60000,28,28) * 255, 0, 255)
label = np.random.randint(0,10,60000)
self.dset = {'image':image, 'label':label}
else:
if not dummy:
self.dset = load_dataset('mnist', split='test')
else:
print(dummy_msg(self.dataset.lower().strip()))
image = np.clip(np.random.randn(10000,28,28) * 255, 0, 255)
label = np.random.randint(0,10,10000)
self.dset = {'image':image, 'label':label}
self.data = self.dset['image']
self.label = self.dset['label']
del self.dset
self.pairs = list(zip(self.data, self.label))
if shuffle:
np.random.seed(123)
np.random.shuffle(self.pairs)
self.data, self.label = zip(*self.pairs)
self.data = np.array(list(map(lambda img: np.array(img) / 255, self.data))).reshape(-1, 28, 28)
self.label = np.array(self.label).reshape(-1)
def __iter__(self):
self.index = 0
return self
def __next__(self):
if self.index < len(self.pairs) - 1:
batch_data = self.data[self.index:self.index+self.batch_size]
batch_label = self.label[self.index:self.index+self.batch_size]
self.index += self.batch_size
# we want to return a tuple of two npArrays of shape 32x28x28 and 32x1
# getdata() has been depreciated
return batch_data, batch_label
else:
raise StopIteration
def __len__(self):
return len(self.pairs) // self.batch_size | autograd/dataloader.py | from datasets import load_dataset
import numpy as np
def dummy_msg(dset:str): return f"Using dummy dataset for {dset}"
class Dataloader:
'''A class that allows us to get batches of data from huggingface datsets'''
def __init__(self, dataset:str,batch_size:int,
transforms:list=list(),
train:bool=True,
shuffle:bool=False,
dummy:bool=False):
self.dataset = dataset.upper()
self.batch_size = batch_size
self.train = train
assert isinstance(self.dataset, str) and isinstance(self.batch_size, int)
if self.dataset == 'MNIST':
# Data source
# https://huggingface.co/datasets/mnist
if train:
#shape is image:(60000,28,28) label: (60000)
if not dummy:
self.dset = load_dataset('mnist', split='train')
else:
print(dummy_msg(self.dataset.lower().strip()))
image = np.clip(np.random.randn(60000,28,28) * 255, 0, 255)
label = np.random.randint(0,10,60000)
self.dset = {'image':image, 'label':label}
else:
if not dummy:
self.dset = load_dataset('mnist', split='test')
else:
print(dummy_msg(self.dataset.lower().strip()))
image = np.clip(np.random.randn(10000,28,28) * 255, 0, 255)
label = np.random.randint(0,10,10000)
self.dset = {'image':image, 'label':label}
self.data = self.dset['image']
self.label = self.dset['label']
del self.dset
self.pairs = list(zip(self.data, self.label))
if shuffle:
np.random.seed(123)
np.random.shuffle(self.pairs)
self.data, self.label = zip(*self.pairs)
self.data = np.array(list(map(lambda img: np.array(img) / 255, self.data))).reshape(-1, 28, 28)
self.label = np.array(self.label).reshape(-1)
def __iter__(self):
self.index = 0
return self
def __next__(self):
if self.index < len(self.pairs) - 1:
batch_data = self.data[self.index:self.index+self.batch_size]
batch_label = self.label[self.index:self.index+self.batch_size]
self.index += self.batch_size
# we want to return a tuple of two npArrays of shape 32x28x28 and 32x1
# getdata() has been depreciated
return batch_data, batch_label
else:
raise StopIteration
def __len__(self):
return len(self.pairs) // self.batch_size | 0.670932 | 0.433322 |
import pytest
from rpi_backlight import Backlight, _EMULATOR_SYSFS_TMP_FILE_PATH
from rpi_backlight.utils import FakeBacklightSysfs
def test_constructor() -> None:
with pytest.raises(TypeError):
Backlight(board_type="foo") # type: ignore[arg-type]
assert not _EMULATOR_SYSFS_TMP_FILE_PATH.exists()
with pytest.raises(RuntimeError):
Backlight(backlight_sysfs_path=":emulator:")
def test_get_fade_duration() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.fade_duration == 0
def test_set_fade_duration() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
backlight.fade_duration = 0.5
assert backlight.fade_duration == 0.5
backlight.fade_duration = 1
assert backlight.fade_duration == 1
with pytest.raises(ValueError):
backlight.fade_duration = -1
with pytest.raises(TypeError):
backlight.fade_duration = "foo" # type: ignore[assignment]
with pytest.raises(TypeError):
backlight.fade_duration = True
def test_get_brightness() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.brightness == 100
def test_set_brightness() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
backlight.brightness = 50
assert backlight.brightness == 50
backlight.brightness = 0
assert backlight.brightness == 0
with pytest.raises(TypeError):
backlight.brightness = "foo" # type: ignore[assignment]
with pytest.raises(TypeError):
backlight.brightness = True
with pytest.raises(ValueError):
backlight.brightness = 101
with pytest.raises(ValueError):
backlight.brightness = -1
def test_get_power() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.power is True
def test_set_power() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
backlight.power = False
assert backlight.power is False
backlight.power = True
assert backlight.power is True
with pytest.raises(TypeError):
backlight.power = "foo" # type: ignore[assignment]
with pytest.raises(TypeError):
backlight.power = 1 # type: ignore[assignment]
def test_fade() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.fade_duration == 0
backlight.fade_duration = 0.1
assert backlight.fade_duration == 0.1
with backlight.fade(duration=0.5) as _val:
assert _val is None
assert backlight.fade_duration == 0.5
assert backlight.fade_duration == 0.1 | tests/test_backlight.py | import pytest
from rpi_backlight import Backlight, _EMULATOR_SYSFS_TMP_FILE_PATH
from rpi_backlight.utils import FakeBacklightSysfs
def test_constructor() -> None:
with pytest.raises(TypeError):
Backlight(board_type="foo") # type: ignore[arg-type]
assert not _EMULATOR_SYSFS_TMP_FILE_PATH.exists()
with pytest.raises(RuntimeError):
Backlight(backlight_sysfs_path=":emulator:")
def test_get_fade_duration() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.fade_duration == 0
def test_set_fade_duration() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
backlight.fade_duration = 0.5
assert backlight.fade_duration == 0.5
backlight.fade_duration = 1
assert backlight.fade_duration == 1
with pytest.raises(ValueError):
backlight.fade_duration = -1
with pytest.raises(TypeError):
backlight.fade_duration = "foo" # type: ignore[assignment]
with pytest.raises(TypeError):
backlight.fade_duration = True
def test_get_brightness() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.brightness == 100
def test_set_brightness() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
backlight.brightness = 50
assert backlight.brightness == 50
backlight.brightness = 0
assert backlight.brightness == 0
with pytest.raises(TypeError):
backlight.brightness = "foo" # type: ignore[assignment]
with pytest.raises(TypeError):
backlight.brightness = True
with pytest.raises(ValueError):
backlight.brightness = 101
with pytest.raises(ValueError):
backlight.brightness = -1
def test_get_power() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.power is True
def test_set_power() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
backlight.power = False
assert backlight.power is False
backlight.power = True
assert backlight.power is True
with pytest.raises(TypeError):
backlight.power = "foo" # type: ignore[assignment]
with pytest.raises(TypeError):
backlight.power = 1 # type: ignore[assignment]
def test_fade() -> None:
with FakeBacklightSysfs() as backlight_sysfs:
backlight = Backlight(backlight_sysfs_path=backlight_sysfs.path)
assert backlight.fade_duration == 0
backlight.fade_duration = 0.1
assert backlight.fade_duration == 0.1
with backlight.fade(duration=0.5) as _val:
assert _val is None
assert backlight.fade_duration == 0.5
assert backlight.fade_duration == 0.1 | 0.583797 | 0.422445 |
import pickle
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
Xs = pickle.load(open('binarized_xs.pkl', 'rb'))
ys = pickle.load(open('binarized_ys.pkl', 'rb'))
l2_model_complexity = np.zeros((10, 15))
l2_num_zero_weights = np.zeros((10, 15))
l1_num_zero_weights = np.zeros((10, 15))
l2_train_cll = np.zeros((10, 15))
l2_test_cll = np.zeros((10, 15))
def l2_complexity(w0, ws):
c = w0**2
for w in ws:
c += w**2
return c
def number_of_zeros(w0, ws):
count = 0
if w0 == 0:
count+=1
count+=ws.tolist().count(0)
return count
def cll(plp, idx):
s = 0
for i in range(len(idx)):
s += plp[i, idx[i]]
return s
for i_dataset in range(10):
X, y = Xs[i_dataset], ys[i_dataset]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=1./3, random_state=1527)
y_train_indices = [0 if i==False else 1 for i in y_train]
y_test_indices = [0 if i==False else 1 for i in y_test]
for i_c in range(-7,8):
clfl2 = LogisticRegression(penalty='l2', C=10**i_c, random_state=42).fit(X_train, y_train)
l2_model_complexity[i_dataset][i_c+7] = l2_complexity(clfl2.intercept_, clfl2.coef_[0])
l2_num_zero_weights[i_dataset][i_c+7] = number_of_zeros(clfl2.intercept_, clfl2.coef_[0])
l2_train_cll[i_dataset][i_c+7] = cll(clfl2.predict_log_proba(X_train), y_train_indices)
l2_test_cll[i_dataset][i_c+7] = cll(clfl2.predict_log_proba(X_test), y_test_indices)
clfl1 = LogisticRegression(penalty='l1', C=10**i_c, random_state=42).fit(X_train, y_train)
l1_num_zero_weights[i_dataset][i_c+7] = number_of_zeros(clfl1.intercept_, clfl1.coef_[0])
for i in range(10):
_, ax = plt.subplots()
ax.set_title('Dataset %d'%(i+1))
ax.plot(l2_model_complexity[i], l2_train_cll[i], label='train_cll')
ax.plot(l2_model_complexity[i], l2_test_cll[i], label='test_cll')
ax.legend()
plt.show()
for i in range(10):
_, ax = plt.subplots()
ax.set_title('Dataset %d'%(i+1))
ax.plot(np.arange(-7,8), l2_num_zero_weights[i], label='l2_num_zero')
ax.plot(np.arange(-7,8), l1_num_zero_weights[i], label='l1_num_zero')
ax.legend()
plt.show()
pickle.dump((l2_model_complexity, l2_train_cll, l2_test_cll, l2_num_zero_weights, l1_num_zero_weights), open('result.pkl', 'wb')) | PA3/code.py | import pickle
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
Xs = pickle.load(open('binarized_xs.pkl', 'rb'))
ys = pickle.load(open('binarized_ys.pkl', 'rb'))
l2_model_complexity = np.zeros((10, 15))
l2_num_zero_weights = np.zeros((10, 15))
l1_num_zero_weights = np.zeros((10, 15))
l2_train_cll = np.zeros((10, 15))
l2_test_cll = np.zeros((10, 15))
def l2_complexity(w0, ws):
c = w0**2
for w in ws:
c += w**2
return c
def number_of_zeros(w0, ws):
count = 0
if w0 == 0:
count+=1
count+=ws.tolist().count(0)
return count
def cll(plp, idx):
s = 0
for i in range(len(idx)):
s += plp[i, idx[i]]
return s
for i_dataset in range(10):
X, y = Xs[i_dataset], ys[i_dataset]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=1./3, random_state=1527)
y_train_indices = [0 if i==False else 1 for i in y_train]
y_test_indices = [0 if i==False else 1 for i in y_test]
for i_c in range(-7,8):
clfl2 = LogisticRegression(penalty='l2', C=10**i_c, random_state=42).fit(X_train, y_train)
l2_model_complexity[i_dataset][i_c+7] = l2_complexity(clfl2.intercept_, clfl2.coef_[0])
l2_num_zero_weights[i_dataset][i_c+7] = number_of_zeros(clfl2.intercept_, clfl2.coef_[0])
l2_train_cll[i_dataset][i_c+7] = cll(clfl2.predict_log_proba(X_train), y_train_indices)
l2_test_cll[i_dataset][i_c+7] = cll(clfl2.predict_log_proba(X_test), y_test_indices)
clfl1 = LogisticRegression(penalty='l1', C=10**i_c, random_state=42).fit(X_train, y_train)
l1_num_zero_weights[i_dataset][i_c+7] = number_of_zeros(clfl1.intercept_, clfl1.coef_[0])
for i in range(10):
_, ax = plt.subplots()
ax.set_title('Dataset %d'%(i+1))
ax.plot(l2_model_complexity[i], l2_train_cll[i], label='train_cll')
ax.plot(l2_model_complexity[i], l2_test_cll[i], label='test_cll')
ax.legend()
plt.show()
for i in range(10):
_, ax = plt.subplots()
ax.set_title('Dataset %d'%(i+1))
ax.plot(np.arange(-7,8), l2_num_zero_weights[i], label='l2_num_zero')
ax.plot(np.arange(-7,8), l1_num_zero_weights[i], label='l1_num_zero')
ax.legend()
plt.show()
pickle.dump((l2_model_complexity, l2_train_cll, l2_test_cll, l2_num_zero_weights, l1_num_zero_weights), open('result.pkl', 'wb')) | 0.341473 | 0.375649 |
import os
import sys
sys.path.append(os.getcwd())
import pandas
import numpy
import json
import time
from scripts.windows.windows import BaseWindowsControl, ProcessMonitoring
from scripts.windows.journalist import BasicLogs
from scripts.prettyCode.prettyPrint import PrettyPrint
PRETTYPRINT = PrettyPrint()
class DataAbacus():
def __init__(self, *args, **kwargs) -> None:
self.logName = kwargs.get('logName', None)
assert self.logName, 'Can not find logname.'
self.logObj = BasicLogs.handler(logName=self.logName, mark='dispatch')
self.logObj.logHandler().info('Initialize DataAnacus(abacus) class instance.')
# pandas分析perfmon数据结果列数
self.pdFps = 2
self.pdVMemory = 4
with open(r'..\config\config.json', 'r', encoding='utf-8') as f:
config = json.load(f)
self.abacusConfig = config.get('AbacusDictionary')
self.standardConfig = config.get('Standard')
self.remoteCheck = config.get('Remote')
def __str__(self) -> str:
return 'BaseAbacus'
def averageData(self, dataSeries: object):
data = self.toFloat(dataSeries.median(), 2)
return data
def maxData(self, dataSeries: object):
data = self.toFloat(dataSeries.max(), 2)
return data
def cleanPerfMonData(self, path) -> list:
file = pandas.read_table(path, header=None, sep='\t', engine='python')
# file -> DataFrame
file = file.drop(labels=0)
fpsColumn, virtualMemoryColumn = file[self.pdFps], file[self.pdVMemory]
# fpsColumn, virtualMemoryColumn -> series
return (fpsColumn, virtualMemoryColumn)
def toFloat(self, numpyFloat, decimals):
if isinstance(numpyFloat, str):
dataFloat = float(numpyFloat)
return round(dataFloat, decimals)
return numpy.around(numpyFloat, decimals=decimals)
def _printVRAMResult(self, avg, max, modelStandard):
if avg > modelStandard:
# 内存超标
difference = avg - modelStandard
avg = self.toFloat(avg, 2)
max = self.toFloat(max, 2)
PRETTYPRINT.pPrint(
'存在超标缺陷, 标准(STANDARD): {} MB, 实际平均(AVG): {} MB, 超标: {} MB, 最大: {} MB'.format(modelStandard, avg, difference, max),
'WARING',
bold=True
)
self.logObj.logHandler().info('Existence of over-standard defects, standard (STANDARD): {} MB, actual average (AVG): {} MB, over-standard: {} MB, MAX: {} MB'.format(modelStandard, avg, difference, max))
return (False, int(avg))
else:
PRETTYPRINT.pPrint('不存在内存超标缺陷')
self.logObj.logHandler().info('There is no memory excess defect.')
return (True, int(avg))
def _printFPSResult(self, avg, max, modelStandard):
# 具体数值差值比较
if avg < modelStandard:
avg = self.toFloat(avg, 2)
max = self.toFloat(max, 2)
# FPS不达标
difference = modelStandard - avg
PRETTYPRINT.pPrint(
'存在FPS缺陷, 标准(STANDARD): {} frame, 实际平均(AVG): {} frame, 不达标: {} frame, 最大: {} frame'.format(modelStandard, avg, difference, max),
'WARING',
bold=True
)
self.logObj.logHandler().info('Existence of over-standard defects, standard (STANDARD): {} frame, actual average (AVG): {} frame, over-standard: {} frame, MAX: {} MB'.format(modelStandard, avg, difference, max))
return (False, int(avg))
else:
PRETTYPRINT.pPrint('不存在FPS超标缺陷')
self.logObj.logHandler().info('There is no FPS excess defect.')
return (True, int(avg))
def clean(self, dataNumpyList, model, ci, *args, **kwargs):
"""数据比较大小分析
Args:
dataNumpyList (object): numpy array.
model (str): Configuration model.
ci (str): Comparison item.
Raises:
AttributeError: Exception method attribute.
Returns:
bool: true or false, analysis result.
"""
# 获取传入数据平均值和最大值
avg = int(self.averageData(dataNumpyList))
max = int(self.maxData(dataNumpyList))
PRETTYPRINT.pPrint('ci: {}, max: {}, avg: {}'.format(ci, avg, max))
self.logObj.logHandler().info('ci: {}, max: {}, avg: {}'.format(ci, avg, max))
# 获取标准并计算
if ci == 'FPS':
modelStandard = self.standardConfig.get('FPS').get(model)
return self._printFPSResult(avg, max, modelStandard)
elif ci == 'VRAM':
modelStandard = self.standardConfig.get('VRAM').get(model)
avg, max = avg / 1024, max / 1024
return self._printVRAMResult(avg, max, modelStandard)
else:
PRETTYPRINT.pPrint('传参错误, 异常method属性', 'ERROR', bold=True)
self.logObj.logHandler().error('[P3] Pass parameter error, abnormal method attribute')
raise AttributeError('异常method属性.')
class VRAMAbacus(DataAbacus):
def __init__(self, dataFilePath, model, *args, **kwargs) -> None:
"""虚拟内存分析
- 虚拟内存
Args:
dataFilePath (str): 数据文件路径
model (str): 测试机机型
"""
super().__init__(*args, **kwargs)
# 获取内存标准
self.VRAMStandard = self.standardConfig.get('VRAM')
self.dataFilePath = dataFilePath
self.model = model
def __str__(self) -> str:
return 'VRAM'
def dispatch(self, *args, **kwargs):
PRETTYPRINT.pPrint('开始分析 - 虚拟内存')
VRAMNumpyList = self.cleanPerfMonData(self.dataFilePath)[1]
result = self.clean(VRAMNumpyList, self.model, 'VRAM')
return result
class FPSAbacus(DataAbacus):
"""FPS内存分析
Args:
dataFilePath (str): 数据文件路径
model (str): 测试机机型
"""
def __init__(self, dataFilePath, model, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
# 获取FPS标准
self.VRAMStandard = self.standardConfig.get('FPS')
self.dataFilePath = dataFilePath
self.model = model
def __str__(self) -> str:
return 'FPS'
def dispatch(self, *args, **kwargs):
PRETTYPRINT.pPrint('开始分析 - FPS')
FPSNumpyList = self.cleanPerfMonData(self.dataFilePath)[0]
result = self.clean(FPSNumpyList, self.model, 'FPS')
return result
class CrashAbacus(DataAbacus):
'''
1. 截图
2. 查找进程
'''
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.logName = kwargs.get('logName', None)
assert self.logName, 'Can not find logname.'
self.logObj = BasicLogs.handler(logName=self.logName, mark='dispatch')
self.processMonitoringObj = ProcessMonitoring(logName=self.logName)
self.logObj.logHandler().info('Initialize CrashAbacus(abacus) class instance.')
def __str__(self) -> str:
return 'Crash'
def dispatch(self, version, startingCheck=False, *args, **kwargs) -> bool:
# 获取标识符
with open(r'..\caches\FileRealVersion.json', 'r', encoding='utf-8') as f:
# uid = ALPHA_xxx
uid = json.load(f).get('uid')
# 保存数据文件夹目录
if not startingCheck:
savePath = '..\caches\crashCertificate\{}'.format(uid)
else:
savePath = os.path.join('.', 'caches', 'startingCrashCheck', uid)
BaseWindowsControl.whereIsTheDir(savePath, 1)
PRETTYPRINT.pPrint('识别到宕机窗口,正在获取焦点')
self.logObj.logHandler().info('A down window is recognized and it is getting focus.')
errorMsg = BaseWindowsControl.activationWindow('错误报告', '#32770')
if errorMsg:
self.logObj.logHandler().error(errorMsg)
if savePath:
# 截图 -> 捕捉可能出现的宕机界面
imgSavePath = os.path.join(savePath, '{}_{}.jpg'.format(uid, version))
PRETTYPRINT.pPrint('已截图当前显示器内容')
self.logObj.logHandler().info('Screenshot of the current display content: {}'.format(imgSavePath))
BaseWindowsControl.screenshots(imgSavePath)
if __name__ == '__main__':
pass | scripts/dataAnalysis/abacus.py | import os
import sys
sys.path.append(os.getcwd())
import pandas
import numpy
import json
import time
from scripts.windows.windows import BaseWindowsControl, ProcessMonitoring
from scripts.windows.journalist import BasicLogs
from scripts.prettyCode.prettyPrint import PrettyPrint
PRETTYPRINT = PrettyPrint()
class DataAbacus():
def __init__(self, *args, **kwargs) -> None:
self.logName = kwargs.get('logName', None)
assert self.logName, 'Can not find logname.'
self.logObj = BasicLogs.handler(logName=self.logName, mark='dispatch')
self.logObj.logHandler().info('Initialize DataAnacus(abacus) class instance.')
# pandas分析perfmon数据结果列数
self.pdFps = 2
self.pdVMemory = 4
with open(r'..\config\config.json', 'r', encoding='utf-8') as f:
config = json.load(f)
self.abacusConfig = config.get('AbacusDictionary')
self.standardConfig = config.get('Standard')
self.remoteCheck = config.get('Remote')
def __str__(self) -> str:
return 'BaseAbacus'
def averageData(self, dataSeries: object):
data = self.toFloat(dataSeries.median(), 2)
return data
def maxData(self, dataSeries: object):
data = self.toFloat(dataSeries.max(), 2)
return data
def cleanPerfMonData(self, path) -> list:
file = pandas.read_table(path, header=None, sep='\t', engine='python')
# file -> DataFrame
file = file.drop(labels=0)
fpsColumn, virtualMemoryColumn = file[self.pdFps], file[self.pdVMemory]
# fpsColumn, virtualMemoryColumn -> series
return (fpsColumn, virtualMemoryColumn)
def toFloat(self, numpyFloat, decimals):
if isinstance(numpyFloat, str):
dataFloat = float(numpyFloat)
return round(dataFloat, decimals)
return numpy.around(numpyFloat, decimals=decimals)
def _printVRAMResult(self, avg, max, modelStandard):
if avg > modelStandard:
# 内存超标
difference = avg - modelStandard
avg = self.toFloat(avg, 2)
max = self.toFloat(max, 2)
PRETTYPRINT.pPrint(
'存在超标缺陷, 标准(STANDARD): {} MB, 实际平均(AVG): {} MB, 超标: {} MB, 最大: {} MB'.format(modelStandard, avg, difference, max),
'WARING',
bold=True
)
self.logObj.logHandler().info('Existence of over-standard defects, standard (STANDARD): {} MB, actual average (AVG): {} MB, over-standard: {} MB, MAX: {} MB'.format(modelStandard, avg, difference, max))
return (False, int(avg))
else:
PRETTYPRINT.pPrint('不存在内存超标缺陷')
self.logObj.logHandler().info('There is no memory excess defect.')
return (True, int(avg))
def _printFPSResult(self, avg, max, modelStandard):
# 具体数值差值比较
if avg < modelStandard:
avg = self.toFloat(avg, 2)
max = self.toFloat(max, 2)
# FPS不达标
difference = modelStandard - avg
PRETTYPRINT.pPrint(
'存在FPS缺陷, 标准(STANDARD): {} frame, 实际平均(AVG): {} frame, 不达标: {} frame, 最大: {} frame'.format(modelStandard, avg, difference, max),
'WARING',
bold=True
)
self.logObj.logHandler().info('Existence of over-standard defects, standard (STANDARD): {} frame, actual average (AVG): {} frame, over-standard: {} frame, MAX: {} MB'.format(modelStandard, avg, difference, max))
return (False, int(avg))
else:
PRETTYPRINT.pPrint('不存在FPS超标缺陷')
self.logObj.logHandler().info('There is no FPS excess defect.')
return (True, int(avg))
def clean(self, dataNumpyList, model, ci, *args, **kwargs):
"""数据比较大小分析
Args:
dataNumpyList (object): numpy array.
model (str): Configuration model.
ci (str): Comparison item.
Raises:
AttributeError: Exception method attribute.
Returns:
bool: true or false, analysis result.
"""
# 获取传入数据平均值和最大值
avg = int(self.averageData(dataNumpyList))
max = int(self.maxData(dataNumpyList))
PRETTYPRINT.pPrint('ci: {}, max: {}, avg: {}'.format(ci, avg, max))
self.logObj.logHandler().info('ci: {}, max: {}, avg: {}'.format(ci, avg, max))
# 获取标准并计算
if ci == 'FPS':
modelStandard = self.standardConfig.get('FPS').get(model)
return self._printFPSResult(avg, max, modelStandard)
elif ci == 'VRAM':
modelStandard = self.standardConfig.get('VRAM').get(model)
avg, max = avg / 1024, max / 1024
return self._printVRAMResult(avg, max, modelStandard)
else:
PRETTYPRINT.pPrint('传参错误, 异常method属性', 'ERROR', bold=True)
self.logObj.logHandler().error('[P3] Pass parameter error, abnormal method attribute')
raise AttributeError('异常method属性.')
class VRAMAbacus(DataAbacus):
def __init__(self, dataFilePath, model, *args, **kwargs) -> None:
"""虚拟内存分析
- 虚拟内存
Args:
dataFilePath (str): 数据文件路径
model (str): 测试机机型
"""
super().__init__(*args, **kwargs)
# 获取内存标准
self.VRAMStandard = self.standardConfig.get('VRAM')
self.dataFilePath = dataFilePath
self.model = model
def __str__(self) -> str:
return 'VRAM'
def dispatch(self, *args, **kwargs):
PRETTYPRINT.pPrint('开始分析 - 虚拟内存')
VRAMNumpyList = self.cleanPerfMonData(self.dataFilePath)[1]
result = self.clean(VRAMNumpyList, self.model, 'VRAM')
return result
class FPSAbacus(DataAbacus):
"""FPS内存分析
Args:
dataFilePath (str): 数据文件路径
model (str): 测试机机型
"""
def __init__(self, dataFilePath, model, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
# 获取FPS标准
self.VRAMStandard = self.standardConfig.get('FPS')
self.dataFilePath = dataFilePath
self.model = model
def __str__(self) -> str:
return 'FPS'
def dispatch(self, *args, **kwargs):
PRETTYPRINT.pPrint('开始分析 - FPS')
FPSNumpyList = self.cleanPerfMonData(self.dataFilePath)[0]
result = self.clean(FPSNumpyList, self.model, 'FPS')
return result
class CrashAbacus(DataAbacus):
    '''
    Crash evidence collector:
    1. Screenshot the display (to capture a possible crash dialog).
    2. Locate and focus the Windows error-report window.
    '''

    def __init__(self, *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)
        self.logName = kwargs.get('logName', None)
        assert self.logName, 'Can not find logname.'
        self.logObj = BasicLogs.handler(logName=self.logName, mark='dispatch')
        self.processMonitoringObj = ProcessMonitoring(logName=self.logName)
        self.logObj.logHandler().info('Initialize CrashAbacus(abacus) class instance.')

    def __str__(self) -> str:
        # Tag used by callers/logs to identify this analyzer.
        return 'Crash'

    def dispatch(self, version, startingCheck=False, *args, **kwargs) -> None:
        """Capture crash evidence (screenshot) for the current run.

        Args:
            version: Build/version label embedded in the screenshot name.
            startingCheck (bool): When True, store evidence under the
                startup-check folder instead of the crash-certificate folder.

        FIX: the return annotation previously claimed ``bool`` although
        nothing was ever returned; corrected to ``None``.
        """
        # uid = ALPHA_xxx — run identifier written by an earlier stage.
        with open(r'..\caches\FileRealVersion.json', 'r', encoding='utf-8') as f:
            uid = json.load(f).get('uid')
        # Folder that receives the crash evidence.
        if not startingCheck:
            # FIX: build the path with os.path.join instead of a
            # backslash-escaped string literal, consistent with the
            # else-branch and immune to accidental escape sequences.
            savePath = os.path.join('..', 'caches', 'crashCertificate', uid)
        else:
            savePath = os.path.join('.', 'caches', 'startingCrashCheck', uid)
        BaseWindowsControl.whereIsTheDir(savePath, 1)
        PRETTYPRINT.pPrint('识别到宕机窗口,正在获取焦点')
        self.logObj.logHandler().info('A down window is recognized and it is getting focus.')
        # Try to focus the Windows error-report dialog; activationWindow
        # returns an error message (truthy) on failure.
        errorMsg = BaseWindowsControl.activationWindow('错误报告', '#32770')
        if errorMsg:
            self.logObj.logHandler().error(errorMsg)
        if savePath:
            # Screenshot the whole display to capture any crash dialog.
            imgSavePath = os.path.join(savePath, '{}_{}.jpg'.format(uid, version))
            PRETTYPRINT.pPrint('已截图当前显示器内容')
            self.logObj.logHandler().info('Screenshot of the current display content: {}'.format(imgSavePath))
            BaseWindowsControl.screenshots(imgSavePath)
if __name__ == '__main__':
pass | 0.435181 | 0.150029 |
import itertools
import sys
import util
import templates
from database_countries import code_to_country
from database_participants import year_grouped as p_db_y
from database_timeline import year_indexed as t_db_y
from database_timeline import previous_year
from database_timeline import next_year
from database_teams import year_grouped as team_db_y
from database_rounds import year_grouped as r_db_y
from functools import cmp_to_key
def run(year):
print("Creating timeline/" + year + "/team")
html = templates.get("timeline/year/team")
html = templates.initial_replace(html, 1)
yeardata = t_db_y[year]
html = html.replace("__YEAR__", year)
html = html.replace("__NUMBER__", yeardata["number"])
html = html.replace("__ORDINAL__", util.ordinal(yeardata["number"]))
if year in previous_year:
html = html.replace("__PREVIOUS_YEAR__", previous_year[year])
html = html.replace("__PREVIOUS_YEAR_STYLE__", "")
else:
html = html.replace("__PREVIOUS_YEAR_STYLE__", "display: none;")
html = html.replace("__PREVIOUS_YEAR__", ".") # Google crawler fix
if year in next_year:
html = html.replace("__NEXT_YEAR__", next_year[year])
html = html.replace("__NEXT_YEAR_STYLE__", "")
else:
html = html.replace("__NEXT_YEAR_STYLE__", "display: none;")
html = html.replace("__NEXT_YEAR__", ".") # Google crawler fix
individual_rounds = 0
team_rounds = 0
individual_column = "<th data-sortinitialorder=\"desc\">Total</th>\n"
team_column = "<th data-sortinitialorder=\"desc\">Total</th>\n"
tablehtml = ""
prevcode = ""
prevrank = 0
if year in team_db_y:
if int(year) >= 1999:
for row in r_db_y[year]:
if row["points"] != "#":
if row["type"] == "Individual":
individual_rounds += 1
else:
team_rounds += 1
team_column += ""
for row in r_db_y[year]:
if row["points"] != "#":
if row["type"] == "Individual":
individual_column += "<th data-sortinitialorder=\"desc\">" + row["number"].split(" ")[-1] + "</th>\n"
else:
team_column += "<th data-sortinitialorder=\"desc\">" + row["number"].split(" ")[-1] + "</th>\n"
html = html.replace("__TEAM__", "<th class=\"sorter-false\" colspan=\"" + str(team_rounds+1) + "\">Team Rounds</th>\n")
html = html.replace("__INDIVIDUAL__", "<th class=\"sorter-false\" colspan=\"" + str(individual_rounds+1) + "\">Individual Rounds</th>\n")
html = html.replace("__TEAM_DETAILS__", team_column)
html = html.replace("__INDIVIDUAL_DETAILS__", individual_column)
for row in team_db_y[year]:
rowhtml = templates.get("timeline/year/team_row")
rowhtml = rowhtml.replace("__CODE__", row["code"])
if row["code"] != "???":
rowhtml = rowhtml.replace("__COUNTRY__", code_to_country[row["code"]])
else:
rowhtml = rowhtml.replace("__COUNTRY__", "???")
rowhtml = rowhtml.replace("__TIER__", row["tier"])
rowhtml = rowhtml.replace("__RANK__", row["rank"])
rowhtml = rowhtml.replace("__OFFICIAL_RANK__", row["official_rank"])
rowhtml = rowhtml.replace("__TOTAL_SCORE__", row["total_score"])
team_rounds_row = ""
for i in range(0, team_rounds+1):
if i == 0:
team_rounds_row += "<td align=\"right\">" + row["team_total"] + "</td>\n"
else:
team_rounds_row += "<td align=\"right\">" + row["team_" + str(i)] + "</td>\n"
rowhtml = rowhtml.replace("__TEAM_ROUNDS__", team_rounds_row)
individual_rounds_row = ""
for i in range(0, individual_rounds+1):
if i == 0:
individual_rounds_row += "<td align=\"right\">" + row["individual_total"] + "</td>\n"
else:
individual_rounds_row += "<td align=\"right\">" + row["individual_" + str(i)] + "</td>\n"
rowhtml = rowhtml.replace("__INDIVIDUAL_ROUNDS__", individual_rounds_row)
tablehtml += rowhtml
html = html.replace("__TABLE__", tablehtml)
html = templates.final_replace(html, "../..")
util.writefile("../timeline/" + year + "/team.html", html)
if __name__ == "__main__":
run(sys.argv[1]) | src/timeline_year_team.py | import itertools
import sys
import util
import templates
from database_countries import code_to_country
from database_participants import year_grouped as p_db_y
from database_timeline import year_indexed as t_db_y
from database_timeline import previous_year
from database_timeline import next_year
from database_teams import year_grouped as team_db_y
from database_rounds import year_grouped as r_db_y
from functools import cmp_to_key
def run(year):
print("Creating timeline/" + year + "/team")
html = templates.get("timeline/year/team")
html = templates.initial_replace(html, 1)
yeardata = t_db_y[year]
html = html.replace("__YEAR__", year)
html = html.replace("__NUMBER__", yeardata["number"])
html = html.replace("__ORDINAL__", util.ordinal(yeardata["number"]))
if year in previous_year:
html = html.replace("__PREVIOUS_YEAR__", previous_year[year])
html = html.replace("__PREVIOUS_YEAR_STYLE__", "")
else:
html = html.replace("__PREVIOUS_YEAR_STYLE__", "display: none;")
html = html.replace("__PREVIOUS_YEAR__", ".") # Google crawler fix
if year in next_year:
html = html.replace("__NEXT_YEAR__", next_year[year])
html = html.replace("__NEXT_YEAR_STYLE__", "")
else:
html = html.replace("__NEXT_YEAR_STYLE__", "display: none;")
html = html.replace("__NEXT_YEAR__", ".") # Google crawler fix
individual_rounds = 0
team_rounds = 0
individual_column = "<th data-sortinitialorder=\"desc\">Total</th>\n"
team_column = "<th data-sortinitialorder=\"desc\">Total</th>\n"
tablehtml = ""
prevcode = ""
prevrank = 0
if year in team_db_y:
if int(year) >= 1999:
for row in r_db_y[year]:
if row["points"] != "#":
if row["type"] == "Individual":
individual_rounds += 1
else:
team_rounds += 1
team_column += ""
for row in r_db_y[year]:
if row["points"] != "#":
if row["type"] == "Individual":
individual_column += "<th data-sortinitialorder=\"desc\">" + row["number"].split(" ")[-1] + "</th>\n"
else:
team_column += "<th data-sortinitialorder=\"desc\">" + row["number"].split(" ")[-1] + "</th>\n"
html = html.replace("__TEAM__", "<th class=\"sorter-false\" colspan=\"" + str(team_rounds+1) + "\">Team Rounds</th>\n")
html = html.replace("__INDIVIDUAL__", "<th class=\"sorter-false\" colspan=\"" + str(individual_rounds+1) + "\">Individual Rounds</th>\n")
html = html.replace("__TEAM_DETAILS__", team_column)
html = html.replace("__INDIVIDUAL_DETAILS__", individual_column)
for row in team_db_y[year]:
rowhtml = templates.get("timeline/year/team_row")
rowhtml = rowhtml.replace("__CODE__", row["code"])
if row["code"] != "???":
rowhtml = rowhtml.replace("__COUNTRY__", code_to_country[row["code"]])
else:
rowhtml = rowhtml.replace("__COUNTRY__", "???")
rowhtml = rowhtml.replace("__TIER__", row["tier"])
rowhtml = rowhtml.replace("__RANK__", row["rank"])
rowhtml = rowhtml.replace("__OFFICIAL_RANK__", row["official_rank"])
rowhtml = rowhtml.replace("__TOTAL_SCORE__", row["total_score"])
team_rounds_row = ""
for i in range(0, team_rounds+1):
if i == 0:
team_rounds_row += "<td align=\"right\">" + row["team_total"] + "</td>\n"
else:
team_rounds_row += "<td align=\"right\">" + row["team_" + str(i)] + "</td>\n"
rowhtml = rowhtml.replace("__TEAM_ROUNDS__", team_rounds_row)
individual_rounds_row = ""
for i in range(0, individual_rounds+1):
if i == 0:
individual_rounds_row += "<td align=\"right\">" + row["individual_total"] + "</td>\n"
else:
individual_rounds_row += "<td align=\"right\">" + row["individual_" + str(i)] + "</td>\n"
rowhtml = rowhtml.replace("__INDIVIDUAL_ROUNDS__", individual_rounds_row)
tablehtml += rowhtml
html = html.replace("__TABLE__", tablehtml)
html = templates.final_replace(html, "../..")
util.writefile("../timeline/" + year + "/team.html", html)
if __name__ == "__main__":
run(sys.argv[1]) | 0.077502 | 0.083965 |
import six
from girder_worker.core import io
from girder_worker.plugins.types import convert, isvalid, format
from .spec import Spec
class ValidationError(Exception):
    """Raised when a port receives data of an unexpected type or format."""

    message_format = (
        'Input "{name}" (Python type "{python_type}") is not of the '
        'expected type ("{type}") and format ("{format}")'
    )

    def __init__(self, port, data_spec):
        """Store the offending port and the data spec that failed.

        :param port: The port that encountered the error
        :type port: :py:class:Port
        :param dict data_spec: The data specification passed to the port.
        """
        self.port = port
        self.data_spec = data_spec

    def __str__(self):
        """Render a human-readable description of the failure."""
        payload = self.data_spec.get('data')
        return self.message_format.format(
            name=str(self.port.name),
            python_type=str(type(payload)),
            type=str(self.port.type),
            format=str(self.port.format),
        )
class Port(Spec):
    """A port defines a communication channel between tasks.

    Ports enable bidirectional communication between tasks and are responsible
    for ensuring that the connections are compatible. The primary purpose of
    ports is to specify what types of data tasks can read and write. This
    information is used by tasks to determine if they can be connected. Ports
    also provide documentation for the task by describing its inputs and
    outputs. Ports also handle fetching data from and pushing data to remote
    data stores.

    >>> spec = {'name': 'a', 'type': 'number', 'format': 'number'}
    >>> port = Port(spec)

    The port object is serialized as a json object

    >>> import json
    >>> json.loads(str(port)) == spec
    True

    It has several properties derived from the spec

    >>> port.name == spec['name']
    True
    >>> port.type == spec['type']
    True
    >>> port.format == spec['format']
    True

    It also supports auto converting formats and validation by default

    >>> port.auto_convert
    True
    >>> port.auto_validate
    True

    Spec properties are automatically validated when setting them

    >>> port = Port()
    Traceback (most recent call last):
        ...
    ValueError: Port specs require a valid name.
    >>> port = Port(name="<NAME>", type="python", format="object")
    >>> port.format = 'invalid'
    Traceback (most recent call last):
        ...
    ValueError: Unknown format "python.invalid"

    Checking the ``type`` is deferred to allow incremental updating

    >>> port['type'] = 'image'
    >>> port.json()
    Traceback (most recent call last):
        ...
    ValueError: Unknown format "image.object"
    >>> port.format = 'png'
    >>> port.json()
    '{"type": "image", "name": "<NAME>", "format": "png"}'
    >>> port == Port(port)
    True
    """

    def __init__(self, *arg, **kw):
        """Initialize the port on a given task.

        Extends the spec initialization by appending defaults and adding basic
        validation. By default, port specs take "python.object" data.
        """
        super(Port, self).__init__(*arg, **kw)
        self.add_validation_check('Port.name', Port.__check_name)
        self.add_validation_check('Port.type', Port.__check_types)
        self.check()

    def __check_name(self, key=None, oldvalue=None, newvalue=None, **kw):
        """Ensure that the spec has necessary keys."""
        if 'name' not in self or not isinstance(self['name'], six.string_types):
            raise ValueError('Port specs require a valid name.')

    def __check_types(self, key=None, oldvalue=None, newvalue=None, **kw):
        """Ensure the data format given is known."""
        if key in ('type', None) and not format.Validator(
                self['type'], None).is_valid():
            raise ValueError('Unknown type "%s"' % (self['type'],))
        elif key in ('format', None) and not format.Validator(
                self['type'], self['format']).is_valid():
            raise ValueError(
                'Unknown format "%s.%s"' % (self['type'], self['format'])
            )

    def validate(self, data_spec):
        """Ensure the given data spec is compatible with this port.

        :param dict data_spec: Data specification
        :returns: bool

        >>> spec = {'name': 'a', 'type': 'number', 'format': 'number'}
        >>> port = Port(spec)
        >>> port.validate({'format': 'number', 'data': 1.5})
        True
        >>> port.validate({'format': 'json', 'data': '1.5'})
        True
        >>> port.validate({'format': 'number', 'data': '1.5'})
        False
        >>> port.validate({'format': 'unknown format', 'data': '...'})
        False
        """
        try:
            return isvalid(self.type, data_spec)
        except Exception:  # catchall validation error
            return False

    def convert(self, data_spec, format):
        """Convert to a compatible data format.

        :param dict data_spec: Data specification
        :param str format: The target data format
        :returns: dict

        >>> spec = {'name': 'a', 'type': 'number', 'format': 'number'}
        >>> port = Port(spec)
        >>> new_spec = port.convert({'format': 'number', 'data': 1}, 'json')
        >>> new_spec['format']
        'json'
        >>> port.fetch(new_spec)
        1
        """
        return convert(self.type, data_spec, {'format': format})

    def fetch(self, data_spec):
        """Return the data described by the given specification.

        :param dict data_spec: A data specification object
        :returns: data
        :raises ValidationError: when the validation check fails

        >>> port = Port({'name': 'a', 'type': 'number', 'format': 'number'})
        >>> port.fetch({'format': 'number', 'data': -1})
        -1
        """
        if self.auto_validate and not self.validate(data_spec):
            raise ValidationError(self, data_spec)
        if self.auto_convert:
            _data = self.convert(data_spec, self.format)
            data = _data.get('data')
        elif self.format == data_spec.get('format'):
            # TODO: This doesn't look right...
            if 'data' in self and self['data'] is not None:
                data = self['data']
            else:
                data = io.fetch(data_spec, task_input=self).get('data')
        else:
            # BUG FIX: this message previously applied the % operator to a
            # str.format-style template ('... {} ...' % (...)), which raised
            # a TypeError instead of the intended exception.
            raise Exception('Expected matching data formats ({} != {})'.format(
                str(data_spec['format']), str(self.format)
            ))
        return data

    def push(self, data_spec):
        """Write data a to remote destination according the to specification.

        :param dict data_spec: A data specification object
        :returns: dict

        >>> port = Port({'name': 'a', 'type': 'number', 'format': 'number'})
        >>> port.push({'format': 'json', 'mode': 'inline', 'data': '2'})['data']
        2
        >>> port.push({'format': 'number', 'mode': 'inline', 'data': 3})['data']
        3
        """
        _spec = data_spec
        if self.auto_validate and not self.validate(_spec):
            raise ValidationError(self, _spec)
        if self.auto_convert:
            _spec = self.convert(_spec, self.format)
        elif _spec['format'] == self.format:
            data = data_spec.get('script_data')  # Is this always a task output?
            io.push(data, _spec, task_output=self.spec)
        else:
            # BUG FIX: same %-on-{}-template defect as in fetch() above.
            raise Exception('Expected matching data formats ({} != {})'.format(
                str(_spec['format']), str(self.format)
            ))
        return _spec
# Declarative spec properties: each call generates a property on Port that
# proxies the matching spec key (third argument, when present, is the
# default value).
Port.make_property('name', 'The name of the port')
Port.make_property('type', 'The data type of the port', 'python')
Port.make_property('format', 'The data format of the port', 'object')
Port.make_property('auto_convert', 'If the data format is automatically', True)
Port.make_property('auto_validate', 'If the data is validated by default', True)

# Public API of this module.
__all__ = (
    'Port',
    'ValidationError'
)
import six
from girder_worker.core import io
from girder_worker.plugins.types import convert, isvalid, format
from .spec import Spec
class ValidationError(Exception):
"""An exception type raised when encountering invalid data types."""
message_format = (
'Input "{name}" (Python type "{python_type}") is not of the '
'expected type ("{type}") and format ("{format}")'
)
def __init__(self, port, data_spec):
"""Generate a data validation exception.
:param port: The port that encountered the error
:type port: :py:class:Port
:param dict data_spec: The data specification passed to the port.
"""
self.port = port
self.data_spec = data_spec
def __str__(self):
"""Initialize an error message for the exception."""
return self.message_format.format(
name=str(self.port.name),
python_type=str(type(self.data_spec.get('data'))),
type=str(self.port.type),
format=str(self.port.format)
)
class Port(Spec):
"""A port defines a communication channel between tasks.
Ports enable bidirectional communication between tasks and are responsible
for ensuring that the connections are compatible. The primary purpose of
ports is to specify what types of data tasks can read and write. This
information is used by tasks to determine if they can be connected. Ports
also provide documentation for the task by describing its inputs and
outputs. Ports also handle fetching data from and pushing data to remote
data stores.
>>> spec = {'name': 'a', 'type': 'number', 'format': 'number'}
>>> port = Port(spec)
The port object is serialized as a json object
>>> import json
>>> json.loads(str(port)) == spec
True
It has several properties derived from the spec
>>> port.name == spec['name']
True
>>> port.type == spec['type']
True
>>> port.format == spec['format']
True
It also supports auto converting formats and validation by default
>>> port.auto_convert
True
>>> port.auto_validate
True
Spec properties are automatically validated when setting them
>>> port = Port()
Traceback (most recent call last):
...
ValueError: Port specs require a valid name.
>>> port = Port(name="<NAME>", type="python", format="object")
>>> port.format = 'invalid'
Traceback (most recent call last):
...
ValueError: Unknown format "python.invalid"
Checking the ``type`` is deferred to allow incremental updating
>>> port['type'] = 'image'
>>> port.json()
Traceback (most recent call last):
...
ValueError: Unknown format "image.object"
>>> port.format = 'png'
>>> port.json()
'{"type": "image", "name": "<NAME>", "format": "png"}'
>>> port == Port(port)
True
"""
def __init__(self, *arg, **kw):
"""Initialize the port on a given task.
Extends the spec initialization by appending defaults and adding basic
validation. By default, port specs take "python.object" data.
"""
super(Port, self).__init__(*arg, **kw)
self.add_validation_check('Port.name', Port.__check_name)
self.add_validation_check('Port.type', Port.__check_types)
self.check()
def __check_name(self, key=None, oldvalue=None, newvalue=None, **kw):
"""Ensure that the spec has necessary keys."""
if 'name' not in self or not isinstance(self['name'], six.string_types):
raise ValueError('Port specs require a valid name.')
def __check_types(self, key=None, oldvalue=None, newvalue=None, **kw):
"""Ensure the data format given is known."""
if key in ('type', None) and not format.Validator(
self['type'], None).is_valid():
raise ValueError('Unknown type "%s"' % (self['type'],))
elif key in ('format', None) and not format.Validator(
self['type'], self['format']).is_valid():
raise ValueError(
'Unknown format "%s.%s"' % (self['type'], self['format'])
)
def validate(self, data_spec):
"""Ensure the given data spec is compatible with this port.
:param dict data_spec: Data specification
:returns: bool
>>> spec = {'name': 'a', 'type': 'number', 'format': 'number'}
>>> port = Port(spec)
>>> port.validate({'format': 'number', 'data': 1.5})
True
>>> port.validate({'format': 'json', 'data': '1.5'})
True
>>> port.validate({'format': 'number', 'data': '1.5'})
False
>>> port.validate({'format': 'unknown format', 'data': '...'})
False
"""
try:
return isvalid(self.type, data_spec)
except Exception: # catchall validation error
return False
def convert(self, data_spec, format):
"""Convert to a compatible data format.
:param dict data_spec: Data specification
:param str format: The target data format
:returns: dict
>>> spec = {'name': 'a', 'type': 'number', 'format': 'number'}
>>> port = Port(spec)
>>> new_spec = port.convert({'format': 'number', 'data': 1}, 'json')
>>> new_spec['format']
'json'
>>> port.fetch(new_spec)
1
"""
return convert(self.type, data_spec, {'format': format})
def fetch(self, data_spec):
"""Return the data described by the given specification.
:param dict data_spec: A data specification object
:returns: data
:raises ValidationError: when the validation check fails
>>> port = Port({'name': 'a', 'type': 'number', 'format': 'number'})
>>> port.fetch({'format': 'number', 'data': -1})
-1
"""
if self.auto_validate and not self.validate(data_spec):
raise ValidationError(self, data_spec)
if self.auto_convert:
_data = self.convert(data_spec, self.format)
data = _data.get('data')
elif self.format == data_spec.get('format'):
# TODO: This doesn't look right...
if 'data' in self and self['data'] is not None:
data = self['data']
else:
data = io.fetch(data_spec, task_input=self).get('data')
else:
raise Exception('Expected matching data formats ({} != {})' % (
str(data_spec['format']), str(self.format)
))
return data
def push(self, data_spec):
"""Write data a to remote destination according the to specification.
:param dict data_spec: A data specification object
:returns: dict
>>> port = Port({'name': 'a', 'type': 'number', 'format': 'number'})
>>> port.push({'format': 'json', 'mode': 'inline', 'data': '2'})['data']
2
>>> port.push({'format': 'number', 'mode': 'inline', 'data': 3})['data']
3
"""
_spec = data_spec
if self.auto_validate and not self.validate(_spec):
raise ValidationError(self, _spec)
if self.auto_convert:
_spec = self.convert(_spec, self.format)
elif _spec['format'] == self.format:
data = data_spec.get('script_data') # Is this always a task output?
io.push(data, _spec, task_output=self.spec)
else:
raise Exception('Expected matching data formats ({} != {})' % (
str(_spec['format']), str(self.format)
))
return _spec
Port.make_property('name', 'The name of the port')
Port.make_property('type', 'The data type of the port', 'python')
Port.make_property('format', 'The data format of the port', 'object')
Port.make_property('auto_convert', 'If the data format is automatically', True)
Port.make_property('auto_validate', 'If the data is validated by default', True)
__all__ = (
'Port',
'ValidationError'
) | 0.750918 | 0.401365 |
import uuid
import os
import logging
import asyncio
import json
from fastapi import HTTPException
from pydantic import BaseModel
from aio_pika import ExchangeType, Message, connect_robust
from aio_pika.abc import AbstractIncomingMessage
from app import mq_settings
LOGGER = logging.getLogger(__name__)
def uuid4():
    """Return a version-4 UUID built from cryptographically secure bytes."""
    random_bytes = os.urandom(16)
    return uuid.UUID(bytes=random_bytes, version=4)
class MQConnector:
    """RPC-style RabbitMQ connector built on aio-pika.

    Requests are published to a direct exchange and the reply is awaited on
    an exclusive callback queue; request/response pairs are matched via the
    message ``correlation_id``.
    """

    def __init__(self):
        # Maps correlation_id -> asyncio.Future resolved with the decoded body.
        self.futures = {}
        # NOTE(review): assumes construction happens inside a running event
        # loop — get_running_loop() raises RuntimeError otherwise.
        self.loop = asyncio.get_running_loop()
        self.connection = None
        self.channel = None
        self.exchange = None
        self.callback_queue = None

    async def connect(self):
        """Open the connection, declare the exchange and the exclusive reply
        queue, and start consuming responses."""
        self.connection = await connect_robust(
            host=mq_settings.host,
            port=mq_settings.port,
            login=mq_settings.username,
            password=<PASSWORD>
        )
        self.channel = await self.connection.channel()
        self.exchange = await self.channel.declare_exchange(mq_settings.exchange, ExchangeType.DIRECT)
        # Exclusive queue: server-named, removed when this connection closes.
        self.callback_queue = await self.channel.declare_queue(exclusive=True)
        await self.callback_queue.consume(self.on_response)

    async def disconnect(self):
        """Delete the callback queue and close the underlying connection."""
        await self.callback_queue.delete()
        await self.connection.close()

    async def on_response(self, message: AbstractIncomingMessage):
        """Resolve the pending future matching this response, if any.

        Responses arriving after their request timed out (future already
        popped) are logged and acknowledged but otherwise discarded.
        """
        if message.correlation_id in self.futures:
            LOGGER.info(f"Received response for request: {{id: {message.correlation_id}}}")
            future = self.futures.pop(message.correlation_id)
            future.set_result(json.loads(message.body))
            LOGGER.debug(f"Response for {message.correlation_id}: {json.loads(message.body)}")
        else:
            LOGGER.warning(f"Response received after message timeout: {{id: {message.correlation_id}}}")
        await message.ack()

    async def publish_request(self, body: BaseModel, language: str):
        """
        Publishes the request to RabbitMQ and awaits the correlated response.

        :param body: Request payload; serialized to JSON bytes for the wire.
        :param language: Routing-key suffix selecting the worker queue.
        :raises HTTPException: 408 when no response arrives in time.

        NOTE(review): ``mq_settings.timeout`` appears to be in milliseconds —
        it is passed raw as the message expiration but divided by 1000 for
        the asyncio wait; confirm against the settings definition.
        """
        correlation_id = str(uuid4())
        future = self.loop.create_future()
        self.futures[correlation_id] = future
        body = body.json().encode()
        message = Message(
            body,
            content_type='application/json',
            correlation_id=correlation_id,
            expiration=mq_settings.timeout,
            reply_to=self.callback_queue.name
        )
        try:
            await self.exchange.publish(message, routing_key=f"{mq_settings.exchange}.{language}")
        except Exception as e:
            # Best-effort recovery: reopen the channel once and retry.
            LOGGER.exception(e)
            LOGGER.info("Attempting to restore the channel.")
            await self.channel.reopen()
            await self.exchange.publish(message, routing_key=f"{mq_settings.exchange}.{language}")
        LOGGER.info(f"Sent request: {{id: {correlation_id}, routing_key: {mq_settings.exchange}.{language}}}")
        LOGGER.debug(f"Request {correlation_id} content: {{id: {correlation_id}}}")
        try:
            response = await asyncio.wait_for(future, timeout=mq_settings.timeout/1000)
        except asyncio.TimeoutError:
            # Drop the abandoned future so a late reply is not delivered.
            LOGGER.info(f"Request timed out: {{id: {message.correlation_id}}}")
            self.futures.pop(message.correlation_id)
            raise HTTPException(408)
        return response
mq_connector = MQConnector() | app/mq_connector.py | import uuid
import os
import logging
import asyncio
import json
from fastapi import HTTPException
from pydantic import BaseModel
from aio_pika import ExchangeType, Message, connect_robust
from aio_pika.abc import AbstractIncomingMessage
from app import mq_settings
LOGGER = logging.getLogger(__name__)
def uuid4():
"""Cryptographycally secure UUID generator."""
return uuid.UUID(bytes=os.urandom(16), version=4)
class MQConnector:
def __init__(self):
self.futures = {}
self.loop = asyncio.get_running_loop()
self.connection = None
self.channel = None
self.exchange = None
self.callback_queue = None
async def connect(self):
self.connection = await connect_robust(
host=mq_settings.host,
port=mq_settings.port,
login=mq_settings.username,
password=<PASSWORD>
)
self.channel = await self.connection.channel()
self.exchange = await self.channel.declare_exchange(mq_settings.exchange, ExchangeType.DIRECT)
self.callback_queue = await self.channel.declare_queue(exclusive=True)
await self.callback_queue.consume(self.on_response)
async def disconnect(self):
await self.callback_queue.delete()
await self.connection.close()
async def on_response(self, message: AbstractIncomingMessage):
if message.correlation_id in self.futures:
LOGGER.info(f"Received response for request: {{id: {message.correlation_id}}}")
future = self.futures.pop(message.correlation_id)
future.set_result(json.loads(message.body))
LOGGER.debug(f"Response for {message.correlation_id}: {json.loads(message.body)}")
else:
LOGGER.warning(f"Response received after message timeout: {{id: {message.correlation_id}}}")
await message.ack()
async def publish_request(self, body: BaseModel, language: str):
"""
Publishes the request to RabbitMQ.
"""
correlation_id = str(uuid4())
future = self.loop.create_future()
self.futures[correlation_id] = future
body = body.json().encode()
message = Message(
body,
content_type='application/json',
correlation_id=correlation_id,
expiration=mq_settings.timeout,
reply_to=self.callback_queue.name
)
try:
await self.exchange.publish(message, routing_key=f"{mq_settings.exchange}.{language}")
except Exception as e:
LOGGER.exception(e)
LOGGER.info("Attempting to restore the channel.")
await self.channel.reopen()
await self.exchange.publish(message, routing_key=f"{mq_settings.exchange}.{language}")
LOGGER.info(f"Sent request: {{id: {correlation_id}, routing_key: {mq_settings.exchange}.{language}}}")
LOGGER.debug(f"Request {correlation_id} content: {{id: {correlation_id}}}")
try:
response = await asyncio.wait_for(future, timeout=mq_settings.timeout/1000)
except asyncio.TimeoutError:
LOGGER.info(f"Request timed out: {{id: {message.correlation_id}}}")
self.futures.pop(message.correlation_id)
raise HTTPException(408)
return response
mq_connector = MQConnector() | 0.612657 | 0.052765 |
import graphene
from graphene_django.types import DjangoObjectType
from django.contrib.auth.models import User
from ..models import TaskManager, Task, Note, Release
from .types import TaskManagerType, TaskType, NoteType, ReleaseType
from .inputs import TaskManagerInput, TaskInput, NoteInput, ReleaseInput
from apps.core.utils import get_or_none
class CreateTaskManager(graphene.Mutation):
    """Create a ``TaskManager`` owned by the requesting user."""

    class Arguments:
        input = TaskManagerInput(required=True)

    ok = graphene.Boolean()
    taskmanager = graphene.Field(TaskManagerType)

    @staticmethod
    def mutate(root, info, input=None):
        """Persist a new task manager and report success."""
        new_manager = TaskManager(
            project_name=input.project_name,
            project_id=input.project_id,
            owner=info.context.user,
        )
        new_manager.save()
        return CreateTaskManager(ok=True, taskmanager=new_manager)
class UpdateTaskManager(graphene.Mutation):
    """Rename an existing ``TaskManager`` identified by primary key."""

    class Arguments:
        id = graphene.Int(required=True)
        input = TaskManagerInput(required=True)

    ok = graphene.Boolean()
    taskmanager = graphene.Field(TaskManagerType)

    @staticmethod
    def mutate(root, info, id, input=None):
        """Apply the new project name, or signal failure for unknown ids."""
        manager = get_or_none(TaskManager, pk=id)
        if manager is None:
            # Unknown id: report failure without touching the database.
            return UpdateTaskManager(ok=False, taskmanager=None)
        manager.project_name = input.project_name
        manager.save()
        return UpdateTaskManager(ok=True, taskmanager=manager)
class CreateTask(graphene.Mutation):
    """Create a ``Task`` under a task manager owned by the caller."""

    class Arguments:
        taskmanager_id = graphene.Int(required=True)
        responsible_id = graphene.Int(required=False)
        input = TaskInput(required=True)

    ok = graphene.Boolean()
    task = graphene.Field(TaskType)

    @staticmethod
    def mutate(root, info, taskmanager_id, input=None, **kwargs):
        """Create the task; only the manager's owner is allowed to."""
        manager = get_or_none(TaskManager, pk=taskmanager_id)
        # Reject unknown managers and callers who do not own the manager.
        if manager is None or manager.owner.pk != info.context.user.pk:
            return CreateTask(ok=False, task=None)
        new_task = Task(
            status=input.status,
            title=input.title,
            description=input.description,
            expected_date=input.expected_date,
            owner=info.context.user,
            task_manager=manager,
        )
        # An optional responsible user may be attached at creation time.
        responsible = get_or_none(User, pk=kwargs.get('responsible_id', None))
        if responsible:
            new_task.responsible = responsible
        new_task.save()
        return CreateTask(ok=True, task=new_task)
class UpdateTask(graphene.Mutation):
    """Update a ``Task``; allowed for its owner or its responsible user."""

    class Arguments:
        id = graphene.Int(required=True)
        responsible_id = graphene.Int(required=False)
        input = TaskInput(required=True)

    ok = graphene.Boolean()
    task = graphene.Field(TaskType)

    @staticmethod
    def mutate(root, info, id, input=None, **kwargs):
        """Apply the edits when the caller is authorized."""
        task = get_or_none(Task, pk=id)
        if task is None:
            return UpdateTask(ok=False, task=None)
        user_pk = info.context.user.pk
        is_owner = task.owner.pk == user_pk
        is_responsible = (task.responsible is not None
                          and task.responsible.pk == user_pk)
        if not (is_owner or is_responsible):
            # Unauthorized: echo the task back, but report failure.
            return UpdateTask(ok=False, task=task)
        task.status = input.status
        task.title = input.title
        task.description = input.description
        task.expected_date = input.expected_date
        responsible_id = kwargs.get('responsible_id', None)
        new_responsible = None
        if responsible_id is not None:
            new_responsible = get_or_none(User, pk=responsible_id)
        # Only the owner may reassign responsibility.
        if new_responsible and is_owner:
            task.responsible = new_responsible
        task.save()
        return UpdateTask(ok=True, task=task)
class CreateRelease(graphene.Mutation):
    """Create a ``Release`` under a task manager owned by the caller.

    Returns ``ok=False`` (and no release) when the task manager does not
    exist or the requesting user is not its owner.
    """

    class Arguments:
        taskmanager_id = graphene.Int(required=True)
        input = ReleaseInput(required=True)

    ok = graphene.Boolean()
    release = graphene.Field(ReleaseType)

    @staticmethod
    def mutate(root, info, taskmanager_id, input=None):
        ok = False
        taskmanager_instance = get_or_none(TaskManager, pk=taskmanager_id)
        if taskmanager_instance:
            context_user_is_the_taskmanager_owner = taskmanager_instance.owner.pk == info.context.user.pk
            if context_user_is_the_taskmanager_owner:
                ok = True
                # BUG FIX: this previously instantiated ``Task`` with
                # Release-only fields (completed_on, is_final_release, ...);
                # a Release must be created here.
                release_instance = Release(
                    completed_on=input.completed_on,
                    is_final_release=input.is_final_release,
                    title=input.title,
                    closed=input.closed,
                    description=input.description,
                    task_manager=taskmanager_instance,
                )
                release_instance.save()
                return CreateRelease(ok=ok, release=release_instance)
        return CreateRelease(ok=ok, release=None)
class UpdateRelease(graphene.Mutation):
    """Update a Release; only the owner of its TaskManager may edit it."""

    class Arguments:
        id = graphene.Int(required=True)
        # BUG FIX: was TaskInput, but the mutation reads release-only fields
        # (completed_on, is_final_release), so ReleaseInput is required here
        # (consistent with CreateRelease).
        input = ReleaseInput(required=True)

    ok = graphene.Boolean()
    release = graphene.Field(ReleaseType)

    @staticmethod
    def mutate(root, info, id, input=None):
        ok = False
        release_instance = get_or_none(Release, pk=id)
        if release_instance:
            context_user_is_the_taskmanager_release_owner = release_instance.task_manager.owner.pk == info.context.user.pk
            if context_user_is_the_taskmanager_release_owner:
                # BUG FIX: ok was never set, so even successful updates
                # reported ok=False to the client.
                ok = True
                release_instance.completed_on = input.completed_on
                release_instance.is_final_release = input.is_final_release
                release_instance.title = input.title
                release_instance.description = input.description
                release_instance.save()
            return UpdateRelease(ok=ok, release=release_instance)
        return UpdateRelease(ok=ok, release=None)
class CreateNote(graphene.Mutation):
    """Attach a new Note, authored by the requesting user, to a Task."""

    class Arguments:
        task_id = graphene.Int(required=True)
        input = NoteInput(required=True)

    ok = graphene.Boolean()
    note = graphene.Field(NoteType)

    @staticmethod
    def mutate(root, info, task_id, input=None):
        parent_task = get_or_none(Task, pk=task_id)
        if not parent_task:
            return CreateNote(ok=False, note=None)
        note = Note(
            description=input.description,
            task=parent_task,
            owner=info.context.user,
        )
        note.save()
        return CreateNote(ok=True, note=note)
class UpdateNote(graphene.Mutation):
    """Update a Note's text and, optionally, move it to another Task."""

    class Arguments:
        id = graphene.Int(required=True)
        # BUG FIX: mutate() reads task_id, but it was never declared as a
        # GraphQL argument, so graphene never supplied it and every call
        # failed with a missing-positional-argument TypeError.
        task_id = graphene.Int(required=False)
        input = NoteInput(required=True)

    ok = graphene.Boolean()
    note = graphene.Field(NoteType)

    @staticmethod
    def mutate(root, info, id, task_id=None, input=None):
        ok = False
        note_instance = get_or_none(Note, pk=id)
        if note_instance:
            ok = True
            note_instance.description = input.description
            # get_or_none presumably yields None for pk=None, leaving the
            # note's task unchanged when no task_id was given.
            task_instance = get_or_none(Task, pk=task_id)
            if task_instance:
                note_instance.task = task_instance
            note_instance.save()
            return UpdateNote(ok=ok, note=note_instance)
        return UpdateNote(ok=ok, note=None)
class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type for the tasks app."""
    create_taskmanager = CreateTaskManager.Field()
    update_taskmanager = UpdateTaskManager.Field()
    create_task = CreateTask.Field()
    update_task = UpdateTask.Field()
    create_release = CreateRelease.Field()
    update_release = UpdateRelease.Field()
    create_note = CreateNote.Field()
    # BUG FIX: this line previously carried trailing merge junk
    # ("| backend/... | import graphene"), a syntax error.
    update_note = UpdateNote.Field()
from graphene_django.types import DjangoObjectType
from django.contrib.auth.models import User
from ..models import TaskManager, Task, Note, Release
from .types import TaskManagerType, TaskType, NoteType, ReleaseType
from .inputs import TaskManagerInput, TaskInput, NoteInput, ReleaseInput
from apps.core.utils import get_or_none
class CreateTaskManager(graphene.Mutation):
    """Create a TaskManager owned by the requesting user."""

    class Arguments:
        input = TaskManagerInput(required=True)

    ok = graphene.Boolean()
    taskmanager = graphene.Field(TaskManagerType)

    @staticmethod
    def mutate(root, info, input=None):
        manager = TaskManager(
            project_name=input.project_name,
            project_id=input.project_id,
            owner=info.context.user,
        )
        manager.save()
        return CreateTaskManager(ok=True, taskmanager=manager)
class UpdateTaskManager(graphene.Mutation):
    """Rename an existing TaskManager.

    NOTE(review): unlike the other mutations there is no ownership check
    here — any authenticated user can rename any manager; confirm intended.
    """

    class Arguments:
        id = graphene.Int(required=True)
        input = TaskManagerInput(required=True)

    ok = graphene.Boolean()
    taskmanager = graphene.Field(TaskManagerType)

    @staticmethod
    def mutate(root, info, id, input=None):
        manager = get_or_none(TaskManager, pk=id)
        if not manager:
            return UpdateTaskManager(ok=False, taskmanager=None)
        manager.project_name = input.project_name
        manager.save()
        return UpdateTaskManager(ok=True, taskmanager=manager)
class CreateTask(graphene.Mutation):
    """Create a Task under a TaskManager; only the manager's owner may."""

    class Arguments:
        taskmanager_id = graphene.Int(required=True)
        responsible_id = graphene.Int(required=False)
        input = TaskInput(required=True)

    ok = graphene.Boolean()
    task = graphene.Field(TaskType)

    @staticmethod
    def mutate(root, info, taskmanager_id, input=None, **kwargs):
        manager = get_or_none(TaskManager, pk=taskmanager_id)
        if not manager:
            return CreateTask(ok=False, task=None)
        if manager.owner.pk != info.context.user.pk:
            return CreateTask(ok=False, task=None)
        task = Task(
            status=input.status,
            title=input.title,
            description=input.description,
            expected_date=input.expected_date,
            owner=info.context.user,
            task_manager=manager,
        )
        # An unknown/absent responsible_id simply leaves the field unset.
        responsible = get_or_none(User, pk=kwargs.get('responsible_id', None))
        if responsible:
            task.responsible = responsible
        task.save()
        return CreateTask(ok=True, task=task)
class UpdateTask(graphene.Mutation):
    """Update an existing Task; only its owner or its responsible may edit."""

    class Arguments:
        id = graphene.Int(required=True)
        responsible_id = graphene.Int(required=False)
        input = TaskInput(required=True)

    ok = graphene.Boolean()
    task = graphene.Field(TaskType)

    @staticmethod
    def mutate(root, info, id, input=None, **kwargs):
        ok = False
        task = get_or_none(Task, pk=id)
        new_responsible = None
        responsible_pk = kwargs.get('responsible_id', None)
        if responsible_pk is not None:
            new_responsible = get_or_none(User, pk=responsible_pk)
        if not task:
            return UpdateTask(ok=ok, task=None)
        requester = info.context.user
        is_owner = task.owner.pk == requester.pk
        is_responsible = (
            task.responsible is not None
            and task.responsible.pk == requester.pk
        )
        if is_owner or is_responsible:
            ok = True
            task.status = input.status
            task.title = input.title
            task.description = input.description
            task.expected_date = input.expected_date
            # Only the owner may reassign who is responsible.
            if new_responsible and is_owner:
                task.responsible = new_responsible
            task.save()
        # Note: the task is returned even when the permission check failed
        # (ok=False), mirroring the original control flow.
        return UpdateTask(ok=ok, task=task)
class CreateRelease(graphene.Mutation):
    """Create a Release under a TaskManager; only the manager's owner may."""

    class Arguments:
        taskmanager_id = graphene.Int(required=True)
        input = ReleaseInput(required=True)

    ok = graphene.Boolean()
    release = graphene.Field(ReleaseType)

    @staticmethod
    def mutate(root, info, taskmanager_id, input=None):
        ok = False
        taskmanager_instance = get_or_none(TaskManager, pk=taskmanager_id)
        if taskmanager_instance:
            context_user_is_the_taskmanager_owner = taskmanager_instance.owner.pk == info.context.user.pk
            if context_user_is_the_taskmanager_owner:
                ok = True
                # BUG FIX: this previously instantiated Task, so "releases"
                # were persisted as tasks (and Release-only fields such as
                # completed_on / is_final_release would not exist on Task).
                release_instance = Release(
                    completed_on=input.completed_on,
                    is_final_release=input.is_final_release,
                    title=input.title,
                    closed=input.closed,
                    description=input.description,
                    task_manager=taskmanager_instance,
                )
                release_instance.save()
                return CreateRelease(ok=ok, release=release_instance)
        return CreateRelease(ok=ok, release=None)
class UpdateRelease(graphene.Mutation):
    """Update a Release; only the owner of its TaskManager may edit it."""

    class Arguments:
        id = graphene.Int(required=True)
        # BUG FIX: was TaskInput, but the mutation reads release-only fields
        # (completed_on, is_final_release), so ReleaseInput is required here
        # (consistent with CreateRelease).
        input = ReleaseInput(required=True)

    ok = graphene.Boolean()
    release = graphene.Field(ReleaseType)

    @staticmethod
    def mutate(root, info, id, input=None):
        ok = False
        release_instance = get_or_none(Release, pk=id)
        if release_instance:
            context_user_is_the_taskmanager_release_owner = release_instance.task_manager.owner.pk == info.context.user.pk
            if context_user_is_the_taskmanager_release_owner:
                # BUG FIX: ok was never set, so even successful updates
                # reported ok=False to the client.
                ok = True
                release_instance.completed_on = input.completed_on
                release_instance.is_final_release = input.is_final_release
                release_instance.title = input.title
                release_instance.description = input.description
                release_instance.save()
            return UpdateRelease(ok=ok, release=release_instance)
        return UpdateRelease(ok=ok, release=None)
class CreateNote(graphene.Mutation):
    """Attach a new Note, authored by the requesting user, to a Task."""

    class Arguments:
        task_id = graphene.Int(required=True)
        input = NoteInput(required=True)

    ok = graphene.Boolean()
    note = graphene.Field(NoteType)

    @staticmethod
    def mutate(root, info, task_id, input=None):
        parent_task = get_or_none(Task, pk=task_id)
        if not parent_task:
            return CreateNote(ok=False, note=None)
        note = Note(
            description=input.description,
            task=parent_task,
            owner=info.context.user,
        )
        note.save()
        return CreateNote(ok=True, note=note)
class UpdateNote(graphene.Mutation):
    """Update a Note's text and, optionally, move it to another Task."""

    class Arguments:
        id = graphene.Int(required=True)
        # BUG FIX: mutate() reads task_id, but it was never declared as a
        # GraphQL argument, so graphene never supplied it and every call
        # failed with a missing-positional-argument TypeError.
        task_id = graphene.Int(required=False)
        input = NoteInput(required=True)

    ok = graphene.Boolean()
    note = graphene.Field(NoteType)

    @staticmethod
    def mutate(root, info, id, task_id=None, input=None):
        ok = False
        note_instance = get_or_none(Note, pk=id)
        if note_instance:
            ok = True
            note_instance.description = input.description
            # get_or_none presumably yields None for pk=None, leaving the
            # note's task unchanged when no task_id was given.
            task_instance = get_or_none(Task, pk=task_id)
            if task_instance:
                note_instance.task = task_instance
            note_instance.save()
            return UpdateNote(ok=ok, note=note_instance)
        return UpdateNote(ok=ok, note=None)
class Mutation(graphene.ObjectType):
    """Root GraphQL mutation type for the tasks app."""
    create_taskmanager = CreateTaskManager.Field()
    update_taskmanager = UpdateTaskManager.Field()
    create_task = CreateTask.Field()
    update_task = UpdateTask.Field()
    create_release = CreateRelease.Field()
    update_release = UpdateRelease.Field()
    create_note = CreateNote.Field()
    # BUG FIX: this line previously carried trailing merge junk
    # ("| 0.51... | 0.10..."), a syntax error.
    update_note = UpdateNote.Field()
from util.hook import *
from util import web
from util import output
from util import database
import re
import socket
# URL template for the per-IP report page on projecthoneypot.org.
base = 'https://www.projecthoneypot.org/ip_%s'
# IPs already flagged and announced this session; persisted via setup()
# and database.set() in auto_honeypot().
db = []
@hook(rule=r'.*', event='JOIN', rate=10)
def auto_honeypot(code, input):
    """Check joining users against the Project Honeypot Database.

    Skips the bot itself, announces flagged IPs only once (via the module
    cache ``db``) and, when configured and opped, kick-bans the offender.
    """
    if not code.config('honeypot_on_join') or input.nick == code.nick:
        return
    global db
    ip = get_ip(input.host)
    try:
        abuser = check(ip)
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt.
        return output.error('Failed to get IP information. Project Honeypot seems to be down!')
    if not abuser:
        return
    # First, we need to check if we've already checked for it, and got a
    # match...
    if ip in db:
        return
    db.append(ip)
    database.set(code.default, db, 'honeypot')
    if code.config('kickban_on_honeypot') and code.chan[input.sender][code.nick]['op']:
        # Wants to kickban, and we've got op. BANHAMMER TIME!
        code.write(['MODE', input.sender, '+b', '*!*@' + input.host])
        code.write(['KICK', input.sender, input.nick], abuser)
    code.say(abuser)
@hook(cmds=['honeypot', 'abuse'], rate=10, args=True)
def honeypot(code, input):
    """Manually look up an IP or hostname in the Project Honeypot database."""
    try:
        # Keep the try body limited to the work that can actually fail.
        ip = get_ip(input.group(2))
        abuser = check(ip)
    except Exception:
        # BUG FIX: was a bare ``except:`` (also caught KeyboardInterrupt).
        return code.say('{red}Failed to check if IP is in the honeypot')
    if abuser:
        return code.say(abuser)
    return code.say('{green}This user isn\'t in the honeypot. The IP is likely clean!')
def check(ip):
    """Scrape the Project Honeypot report page for *ip*.

    Returns a '{b}...{b}'-formatted summary string when the IP has a
    malicious record, or None for clean/unknown IPs (old or empty records
    are only surfaced via output.warning).
    """
    ip = str(ip)
    # Flatten the page so the single regex below can match across lines.
    data = web.text(base % web.quote(ip)).replace('\n', '').replace('\r', '')
    items = re.compile(r'<div class="contain">.*?<p>(.*?)</p>').findall(data)
    if not items:
        return
    item = web.striptags(items[0])
    if 'We don\'t have data on this IP currently.' in item:
        return
    elif 'none of its visits have resulted' in item:
        return
    else:
        # Drop the boilerplate that follows the summary sentence.
        item = item.split('Below', 1)[0]
        if 'The Project Honey Pot system has ' in item:
            item = item.split('The Project Honey Pot system has ')[1]
            item = item[0].upper() + item[1:]
        if 'This IP has not seen any suspicious activity' in data:
            # Stale record: warn locally instead of reporting to channel.
            if 'the IP address' in item:
                item = item.replace('the IP address', '%s' % ip)
            output.warning(str(item) +
                           'This is an old record so it might be invalid.')
            return
        if 'the IP address' in item:
            item = item.replace('the IP address', '{red}%s{c}' % ip)
        if 'Double check your URL to make sure this error' in item:
            return
        return '{b}%s{b}' % item.strip()
def get_ip(hostname):
    """Resolve *hostname* to an IPv4 address string.

    Dotted-digit input is assumed to already be an IP and is returned
    unchanged; on resolution failure the hostname is returned as-is.
    """
    # Crude IPv4-literal test: all digits once the dots are removed.
    if hostname.replace('.', '').isdigit():
        return hostname
    try:
        # BUG FIX: previously resolved the local machine's own FQDN
        # (socket.getfqdn() with no argument) instead of the given hostname.
        return socket.gethostbyname(hostname)
    except socket.error:
        return hostname
def setup(code):
    """Load the persisted honeypot IP cache into the module-level ``db``."""
    global db
    db = database.get(code.default, 'honeypot')
    # BUG FIX: the fallback line previously carried trailing merge junk
    # ("| modules/honeypot.py | ..."), a syntax error.
    if not db:
        db = []
from util import web
from util import output
from util import database
import re
import socket
# URL template for the per-IP report page on projecthoneypot.org.
base = 'https://www.projecthoneypot.org/ip_%s'
# IPs already flagged and announced this session; persisted via setup()
# and database.set() in auto_honeypot().
db = []
@hook(rule=r'.*', event='JOIN', rate=10)
def auto_honeypot(code, input):
    """Check joining users against the Project Honeypot Database.

    Skips the bot itself, announces flagged IPs only once (via the module
    cache ``db``) and, when configured and opped, kick-bans the offender.
    """
    if not code.config('honeypot_on_join') or input.nick == code.nick:
        return
    global db
    ip = get_ip(input.host)
    try:
        abuser = check(ip)
    except Exception:
        # BUG FIX: was a bare ``except:``, which also swallowed
        # SystemExit/KeyboardInterrupt.
        return output.error('Failed to get IP information. Project Honeypot seems to be down!')
    if not abuser:
        return
    # First, we need to check if we've already checked for it, and got a
    # match...
    if ip in db:
        return
    db.append(ip)
    database.set(code.default, db, 'honeypot')
    if code.config('kickban_on_honeypot') and code.chan[input.sender][code.nick]['op']:
        # Wants to kickban, and we've got op. BANHAMMER TIME!
        code.write(['MODE', input.sender, '+b', '*!*@' + input.host])
        code.write(['KICK', input.sender, input.nick], abuser)
    code.say(abuser)
@hook(cmds=['honeypot', 'abuse'], rate=10, args=True)
def honeypot(code, input):
    """Manually look up an IP or hostname in the Project Honeypot database."""
    try:
        # Keep the try body limited to the work that can actually fail.
        ip = get_ip(input.group(2))
        abuser = check(ip)
    except Exception:
        # BUG FIX: was a bare ``except:`` (also caught KeyboardInterrupt).
        return code.say('{red}Failed to check if IP is in the honeypot')
    if abuser:
        return code.say(abuser)
    return code.say('{green}This user isn\'t in the honeypot. The IP is likely clean!')
def check(ip):
    """Scrape the Project Honeypot report page for *ip*.

    Returns a '{b}...{b}'-formatted summary string when the IP has a
    malicious record, or None for clean/unknown IPs (old or empty records
    are only surfaced via output.warning).
    """
    ip = str(ip)
    # Flatten the page so the single regex below can match across lines.
    data = web.text(base % web.quote(ip)).replace('\n', '').replace('\r', '')
    items = re.compile(r'<div class="contain">.*?<p>(.*?)</p>').findall(data)
    if not items:
        return
    item = web.striptags(items[0])
    if 'We don\'t have data on this IP currently.' in item:
        return
    elif 'none of its visits have resulted' in item:
        return
    else:
        # Drop the boilerplate that follows the summary sentence.
        item = item.split('Below', 1)[0]
        if 'The Project Honey Pot system has ' in item:
            item = item.split('The Project Honey Pot system has ')[1]
            item = item[0].upper() + item[1:]
        if 'This IP has not seen any suspicious activity' in data:
            # Stale record: warn locally instead of reporting to channel.
            if 'the IP address' in item:
                item = item.replace('the IP address', '%s' % ip)
            output.warning(str(item) +
                           'This is an old record so it might be invalid.')
            return
        if 'the IP address' in item:
            item = item.replace('the IP address', '{red}%s{c}' % ip)
        if 'Double check your URL to make sure this error' in item:
            return
        return '{b}%s{b}' % item.strip()
def get_ip(hostname):
    """Resolve *hostname* to an IPv4 address string.

    Dotted-digit input is assumed to already be an IP and is returned
    unchanged; on resolution failure the hostname is returned as-is.
    """
    # Crude IPv4-literal test: all digits once the dots are removed.
    if hostname.replace('.', '').isdigit():
        return hostname
    try:
        # BUG FIX: previously resolved the local machine's own FQDN
        # (socket.getfqdn() with no argument) instead of the given hostname.
        return socket.gethostbyname(hostname)
    except socket.error:
        return hostname
def setup(code):
    """Load the persisted honeypot IP cache into the module-level ``db``."""
    global db
    db = database.get(code.default, 'honeypot')
    # BUG FIX: the fallback line previously carried trailing merge junk
    # ("| 0.35... | 0.15..."), a syntax error.
    if not db:
        db = []
from enum import Enum
import copy
from abc import ABC, abstractmethod
import numbers
from itertools import count
import numpy as np
import scipy
class Type(Enum):
    """Hyperparameter kind; the values are the single-char vartype codes."""
    Continuous = 'c'
    Discrete = 'o'
class DuplicateHyperparameterError(Exception):
    """Raised when two active hyperparameters share the same name."""
    pass
class MissingHyperparameterError(Exception):
    """Raised when a conditional hyperparameter has no active branch."""
    pass
class Configuration:
    """Concrete assignment of values to a set of hyperparameters.

    Parameters are restored to declaration order, conditional parameters
    whose condition does not hold are dropped, and the survivors are exposed
    both as an ordered list and as a name -> parameter map.
    """

    def __init__(self, hyperparameters):
        # Restore declaration order; deep copies made by Hyperparameter.new()
        # keep the original ``_init_idx``.
        idxs = np.argsort([x._init_idx for x in hyperparameters])
        hyperparameters = np.array(hyperparameters)[idxs]
        self.hyperparameters = []
        self.hyperparameter_map = {}
        self.max_length = 0  # longest parameter name, used to pad __str__
        # One character per accepted parameter ('c'/'o'); the name suggests
        # these feed a KDE's variable-type argument downstream.
        self.kde_vartypes = ''
        names = set()
        for hyperparameter in hyperparameters:
            names.add(hyperparameter.name)
            length = len(hyperparameter.name)
            if length > self.max_length:
                self.max_length = length
            # Conditional parameter: keep it only if its condition holds for
            # the parameters accepted so far (declaration order matters).
            if hyperparameter.cond is not None:
                if not hyperparameter.cond.compare(self):
                    continue
            if hyperparameter.name in self.hyperparameter_map:
                raise DuplicateHyperparameterError(
                    f'Conflicting Hyperparameter: {hyperparameter.name}')
            self.hyperparameter_map[hyperparameter.name] = hyperparameter
            self.hyperparameters.append(hyperparameter)
            self.kde_vartypes += hyperparameter.vartype
        # Every declared name must survive exactly once: conditional
        # parameters therefore need a complementary default branch.
        missing = names - set(self.hyperparameter_map)
        if len(missing):
            raise MissingHyperparameterError(
                f'Parameters: {missing} are missing. '
                'Implement the default case if using conditions.\n'
                'E.g.\nparameter = UniformHyperparameter("paramater", 0, 10, a == b)\n'
                'not_parameter = UniformHyperparameter("paramater", 0, 0, '
                '~parameter.cond)')

    def to_dict(self):
        """Return {name: value} for parameters not marked ``dont_pass``."""
        config = {}
        for hyperparameter in self.hyperparameters:
            if not hyperparameter.dont_pass:
                config[hyperparameter.name] = hyperparameter.value
        return config

    def to_list(self):
        """Return the numeric vector form (value, or index for discretes)."""
        array = []
        for hyperparameter in self.hyperparameters:
            if hyperparameter.type == Type.Continuous:
                array.append(hyperparameter.value)
            elif hyperparameter.type == Type.Discrete:
                array.append(hyperparameter.index)
            else:
                raise NotImplementedError
        return array

    def __getitem__(self, idx):
        return self.hyperparameters[idx]

    def __str__(self):
        string = ["Configuration:\n"]
        for hyperparameter in self.hyperparameters:
            string.append(
                (f'{"Name:":>8} {hyperparameter.name: <{self.max_length}} | '
                 f"Value: {hyperparameter.value}\n").ljust(10))
        return ''.join(string)
class Hyperparameter(ABC):
    """Abstract base class for hyperparameters.

    The comparison operators do not return booleans: they build
    :class:`Condition` objects (a small DSL for conditional parameters,
    e.g. ``UniformHyperparameter("x", 0, 1, cond=a == 3)``).  Instances
    are therefore unhashable and must not be compared directly.
    """

    # Monotonic declaration counter so Configuration can restore the order
    # in which parameters were defined.
    _init_count = count()

    def __init__(self, name, value, cond=None, dont_pass=False):
        self._value = None
        self.name = name
        self.value = value  # routed through the subclass's value setter
        self.cond = cond
        self._init_idx = next(Hyperparameter._init_count)
        self.dont_pass = dont_pass  # exclude from Configuration.to_dict()

    def new(self, value=None):
        """Deep-copy this parameter, optionally assigning a new value."""
        clone = copy.deepcopy(self)
        if value is not None:
            clone.value = value
        return clone

    @abstractmethod
    def sample(self):
        """Draw a fresh copy of this parameter from its distribution."""
        ...

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, type):
        # Keep the one-character vartype code in sync with the enum value.
        self.vartype = type.value
        self._type = type

    # -- Condition-building helpers -------------------------------------
    # ``other.value`` is read lazily inside the closure, at compare time,
    # exactly as the original per-operator lambdas did.

    def _equality_condition(self, other, op):
        if isinstance(other, Hyperparameter):
            return Condition(
                lambda configs: op(configs[self.name].value, other.value))
        return Condition(
            lambda configs: op(configs[self.name].value, other))

    def _ordering_condition(self, other, op):
        if isinstance(other, numbers.Number):
            return Condition(
                lambda configs: op(configs[self.name].value, other))
        if isinstance(other, Hyperparameter):
            return Condition(
                lambda configs: op(configs[self.name].value, other.value))
        raise NotImplementedError

    def __eq__(self, other):
        return self._equality_condition(other, lambda a, b: a == b)

    def __ne__(self, other):
        return self._equality_condition(other, lambda a, b: a != b)

    def __lt__(self, other):
        return self._ordering_condition(other, lambda a, b: a < b)

    def __le__(self, other):
        return self._ordering_condition(other, lambda a, b: a <= b)

    def __gt__(self, other):
        return self._ordering_condition(other, lambda a, b: a > b)

    def __ge__(self, other):
        return self._ordering_condition(other, lambda a, b: a >= b)
class ConfigurationSpace:
    """A set of hyperparameters plus the RNG used to sample from them."""

    def __init__(self, hyperparameters, seed=None):
        self.hyperparameters = hyperparameters
        self.rng = np.random.default_rng(seed)
        # Discrete parameters sharing a name get the union of all declared
        # choice lists in ``_choices`` so their indices stay comparable.
        seen = {}
        for hp in self.hyperparameters:
            if hp.type != Type.Discrete:
                continue
            if hp.name in seen:
                merged = list(np.unique(seen[hp.name]._choices + hp.choices))
                seen[hp.name]._choices = merged
                hp._choices = merged
            else:
                seen[hp.name] = hp

    def sample_configuration(self):
        """Draw one random Configuration from the space."""
        samples = [hp.sample(self.rng) for hp in self.hyperparameters]
        return Configuration(samples)

    def __len__(self):
        return len(self.hyperparameters)
class Condition:
    """Lazily-evaluated predicate over a configuration's parameter map."""

    def __init__(self, comp):
        # ``comp`` maps {name: Hyperparameter} -> bool-ish.
        self.comp = comp

    def compare(self, configuration):
        """Evaluate this condition against *configuration*."""
        return self.comp(configuration.hyperparameter_map)

    def __and__(self, other):
        def both(configs):
            return self.comp(configs) and other.comp(configs)
        return Condition(both)

    def __or__(self, other):
        def either(configs):
            return self.comp(configs) or other.comp(configs)
        return Condition(either)

    def __invert__(self):
        def negated(configs):
            return not self.comp(configs)
        return Condition(negated)
class UniformHyperparameter(Hyperparameter):
    """Continuous parameter drawn uniformly from [lower, upper].

    With ``log=True`` sampling is uniform in log-space while ``value`` is
    always stored (and clamped) in the original space.
    """

    def __init__(self, name, lower, upper, cond=None, log=False, dont_pass=False):
        self.type = Type.Continuous
        # Raw-space bounds, used for clamping in the value setter.
        self._lower = lower
        self._upper = upper
        # Sampling-space bounds (log-space when log=True).
        self.lower = np.log(lower) if log else lower
        self.upper = np.log(upper) if log else upper
        self.log = log
        value = (self.lower + self.upper) / 2  # midpoint as the default
        super().__init__(name, np.exp(value) if log else value, cond, dont_pass)

    def sample(self, rng):
        value = rng.uniform(self.lower, self.upper)
        return self.new(np.exp(value) if self.log else value)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Clamp assignments into the raw-space bounds.
        self._value = min(max(self._lower, value), self._upper)
class IntegerUniformHyperparameter(UniformHyperparameter):
    """Uniform parameter whose stored value is rounded to the nearest int."""

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Clamp to the raw bounds first, then round to an int.
        self._value = int(round(min(max(self._lower, value), self._upper)))
class NormalHyperparameter(Hyperparameter):
    """Continuous parameter sampled from an unbounded normal distribution."""

    def __init__(self, name, mean, sigma, cond=None, dont_pass=False):
        self.type = Type.Continuous
        self.mean = mean
        self.sigma = sigma
        super().__init__(name, self.mean, cond, dont_pass)  # default = mean

    def sample(self, rng):
        return self.new(rng.normal(self.mean, self.sigma))
class IntegerNormalHyperparameter(NormalHyperparameter):
    """Integer parameter sampled from a truncated normal distribution."""

    def __init__(self, name, mean, sigma, cond=None, dont_pass=False):
        # NOTE(review): scipy's truncnorm takes a/b in standard-deviation
        # units, so a=-sigma, b=sigma truncates at mean +/- sigma*sigma
        # rather than mean +/- sigma -- confirm the intended window.
        self.rv = scipy.stats.truncnorm(a=-sigma, b=sigma, scale=sigma, loc=mean)
        super().__init__(name, mean, sigma, cond, dont_pass)

    def sample(self, rng):
        return self.new(self.rv.rvs(random_state=rng))

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Round to the nearest integer (no clamping here).
        self._value = int(round(value))
class CategoricalHyperparameter(Hyperparameter):
    """Discrete parameter chosen from a fixed list of options.

    ``choices`` is what this instance was declared with; ``_choices`` may
    be widened by ConfigurationSpace to the union of all same-named
    parameters, and ``index``/``value`` always refer to ``_choices``.
    """

    def __init__(self, name, choices, cond=None, dont_pass=False):
        self.type = Type.Discrete
        self.index = 0
        self.choices = choices
        self._choices = choices
        super().__init__(name, self.index, cond, dont_pass)

    def sample(self, rng):
        index = rng.integers(0, len(self.choices))
        if len(self._choices) == len(self.choices):
            _index = index
        else:
            # _choices was merged/extended: translate into the merged list.
            _index = self._choices.index(self.choices[index])
        return self.new(_index)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, index):
        self.index = index
        # BUG FIX: this line previously carried trailing merge junk
        # ("| bohb/configspace.py | ..."), a syntax error.
        self._value = self._choices[index]
import copy
from abc import ABC, abstractmethod
import numbers
from itertools import count
import numpy as np
import scipy
class Type(Enum):
    """Hyperparameter kind; the values are the single-char vartype codes."""
    Continuous = 'c'
    Discrete = 'o'
class DuplicateHyperparameterError(Exception):
    """Raised when two active hyperparameters share the same name."""
    pass
class MissingHyperparameterError(Exception):
    """Raised when a conditional hyperparameter has no active branch."""
    pass
class Configuration:
    """Concrete assignment of values to a set of hyperparameters.

    Parameters are restored to declaration order, conditional parameters
    whose condition does not hold are dropped, and the survivors are exposed
    both as an ordered list and as a name -> parameter map.
    """

    def __init__(self, hyperparameters):
        # Restore declaration order; deep copies made by Hyperparameter.new()
        # keep the original ``_init_idx``.
        idxs = np.argsort([x._init_idx for x in hyperparameters])
        hyperparameters = np.array(hyperparameters)[idxs]
        self.hyperparameters = []
        self.hyperparameter_map = {}
        self.max_length = 0  # longest parameter name, used to pad __str__
        # One character per accepted parameter ('c'/'o'); the name suggests
        # these feed a KDE's variable-type argument downstream.
        self.kde_vartypes = ''
        names = set()
        for hyperparameter in hyperparameters:
            names.add(hyperparameter.name)
            length = len(hyperparameter.name)
            if length > self.max_length:
                self.max_length = length
            # Conditional parameter: keep it only if its condition holds for
            # the parameters accepted so far (declaration order matters).
            if hyperparameter.cond is not None:
                if not hyperparameter.cond.compare(self):
                    continue
            if hyperparameter.name in self.hyperparameter_map:
                raise DuplicateHyperparameterError(
                    f'Conflicting Hyperparameter: {hyperparameter.name}')
            self.hyperparameter_map[hyperparameter.name] = hyperparameter
            self.hyperparameters.append(hyperparameter)
            self.kde_vartypes += hyperparameter.vartype
        # Every declared name must survive exactly once: conditional
        # parameters therefore need a complementary default branch.
        missing = names - set(self.hyperparameter_map)
        if len(missing):
            raise MissingHyperparameterError(
                f'Parameters: {missing} are missing. '
                'Implement the default case if using conditions.\n'
                'E.g.\nparameter = UniformHyperparameter("paramater", 0, 10, a == b)\n'
                'not_parameter = UniformHyperparameter("paramater", 0, 0, '
                '~parameter.cond)')

    def to_dict(self):
        """Return {name: value} for parameters not marked ``dont_pass``."""
        config = {}
        for hyperparameter in self.hyperparameters:
            if not hyperparameter.dont_pass:
                config[hyperparameter.name] = hyperparameter.value
        return config

    def to_list(self):
        """Return the numeric vector form (value, or index for discretes)."""
        array = []
        for hyperparameter in self.hyperparameters:
            if hyperparameter.type == Type.Continuous:
                array.append(hyperparameter.value)
            elif hyperparameter.type == Type.Discrete:
                array.append(hyperparameter.index)
            else:
                raise NotImplementedError
        return array

    def __getitem__(self, idx):
        return self.hyperparameters[idx]

    def __str__(self):
        string = ["Configuration:\n"]
        for hyperparameter in self.hyperparameters:
            string.append(
                (f'{"Name:":>8} {hyperparameter.name: <{self.max_length}} | '
                 f"Value: {hyperparameter.value}\n").ljust(10))
        return ''.join(string)
class Hyperparameter(ABC):
    """Abstract base class for hyperparameters.

    The comparison operators do not return booleans: they build
    :class:`Condition` objects (a small DSL for conditional parameters,
    e.g. ``UniformHyperparameter("x", 0, 1, cond=a == 3)``).  Instances
    are therefore unhashable and must not be compared directly.
    """

    # Monotonic declaration counter so Configuration can restore the order
    # in which parameters were defined.
    _init_count = count()

    def __init__(self, name, value, cond=None, dont_pass=False):
        self._value = None
        self.name = name
        self.value = value  # routed through the subclass's value setter
        self.cond = cond
        self._init_idx = next(Hyperparameter._init_count)
        self.dont_pass = dont_pass  # exclude from Configuration.to_dict()

    def new(self, value=None):
        """Deep-copy this parameter, optionally assigning a new value."""
        clone = copy.deepcopy(self)
        if value is not None:
            clone.value = value
        return clone

    @abstractmethod
    def sample(self):
        """Draw a fresh copy of this parameter from its distribution."""
        ...

    @property
    def type(self):
        return self._type

    @type.setter
    def type(self, type):
        # Keep the one-character vartype code in sync with the enum value.
        self.vartype = type.value
        self._type = type

    # -- Condition-building helpers -------------------------------------
    # ``other.value`` is read lazily inside the closure, at compare time,
    # exactly as the original per-operator lambdas did.

    def _equality_condition(self, other, op):
        if isinstance(other, Hyperparameter):
            return Condition(
                lambda configs: op(configs[self.name].value, other.value))
        return Condition(
            lambda configs: op(configs[self.name].value, other))

    def _ordering_condition(self, other, op):
        if isinstance(other, numbers.Number):
            return Condition(
                lambda configs: op(configs[self.name].value, other))
        if isinstance(other, Hyperparameter):
            return Condition(
                lambda configs: op(configs[self.name].value, other.value))
        raise NotImplementedError

    def __eq__(self, other):
        return self._equality_condition(other, lambda a, b: a == b)

    def __ne__(self, other):
        return self._equality_condition(other, lambda a, b: a != b)

    def __lt__(self, other):
        return self._ordering_condition(other, lambda a, b: a < b)

    def __le__(self, other):
        return self._ordering_condition(other, lambda a, b: a <= b)

    def __gt__(self, other):
        return self._ordering_condition(other, lambda a, b: a > b)

    def __ge__(self, other):
        return self._ordering_condition(other, lambda a, b: a >= b)
class ConfigurationSpace:
    """A set of hyperparameters plus the RNG used to sample from them."""

    def __init__(self, hyperparameters, seed=None):
        self.hyperparameters = hyperparameters
        self.rng = np.random.default_rng(seed)
        # Discrete parameters sharing a name get the union of all declared
        # choice lists in ``_choices`` so their indices stay comparable.
        seen = {}
        for hp in self.hyperparameters:
            if hp.type != Type.Discrete:
                continue
            if hp.name in seen:
                merged = list(np.unique(seen[hp.name]._choices + hp.choices))
                seen[hp.name]._choices = merged
                hp._choices = merged
            else:
                seen[hp.name] = hp

    def sample_configuration(self):
        """Draw one random Configuration from the space."""
        samples = [hp.sample(self.rng) for hp in self.hyperparameters]
        return Configuration(samples)

    def __len__(self):
        return len(self.hyperparameters)
class Condition:
    """Lazily-evaluated predicate over a configuration's parameter map."""

    def __init__(self, comp):
        # ``comp`` maps {name: Hyperparameter} -> bool-ish.
        self.comp = comp

    def compare(self, configuration):
        """Evaluate this condition against *configuration*."""
        return self.comp(configuration.hyperparameter_map)

    def __and__(self, other):
        def both(configs):
            return self.comp(configs) and other.comp(configs)
        return Condition(both)

    def __or__(self, other):
        def either(configs):
            return self.comp(configs) or other.comp(configs)
        return Condition(either)

    def __invert__(self):
        def negated(configs):
            return not self.comp(configs)
        return Condition(negated)
class UniformHyperparameter(Hyperparameter):
    """Continuous parameter drawn uniformly from [lower, upper].

    With ``log=True`` sampling is uniform in log-space while ``value`` is
    always stored (and clamped) in the original space.
    """

    def __init__(self, name, lower, upper, cond=None, log=False, dont_pass=False):
        self.type = Type.Continuous
        # Raw-space bounds, used for clamping in the value setter.
        self._lower = lower
        self._upper = upper
        # Sampling-space bounds (log-space when log=True).
        self.lower = np.log(lower) if log else lower
        self.upper = np.log(upper) if log else upper
        self.log = log
        value = (self.lower + self.upper) / 2  # midpoint as the default
        super().__init__(name, np.exp(value) if log else value, cond, dont_pass)

    def sample(self, rng):
        value = rng.uniform(self.lower, self.upper)
        return self.new(np.exp(value) if self.log else value)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Clamp assignments into the raw-space bounds.
        self._value = min(max(self._lower, value), self._upper)
class IntegerUniformHyperparameter(UniformHyperparameter):
    """Uniform parameter whose stored value is rounded to the nearest int."""

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Clamp to the raw bounds first, then round to an int.
        self._value = int(round(min(max(self._lower, value), self._upper)))
class NormalHyperparameter(Hyperparameter):
    """Continuous parameter sampled from an unbounded normal distribution."""

    def __init__(self, name, mean, sigma, cond=None, dont_pass=False):
        self.type = Type.Continuous
        self.mean = mean
        self.sigma = sigma
        super().__init__(name, self.mean, cond, dont_pass)  # default = mean

    def sample(self, rng):
        return self.new(rng.normal(self.mean, self.sigma))
class IntegerNormalHyperparameter(NormalHyperparameter):
    """Integer parameter sampled from a truncated normal distribution."""

    def __init__(self, name, mean, sigma, cond=None, dont_pass=False):
        # NOTE(review): scipy's truncnorm takes a/b in standard-deviation
        # units, so a=-sigma, b=sigma truncates at mean +/- sigma*sigma
        # rather than mean +/- sigma -- confirm the intended window.
        self.rv = scipy.stats.truncnorm(a=-sigma, b=sigma, scale=sigma, loc=mean)
        super().__init__(name, mean, sigma, cond, dont_pass)

    def sample(self, rng):
        return self.new(self.rv.rvs(random_state=rng))

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        # Round to the nearest integer (no clamping here).
        self._value = int(round(value))
class CategoricalHyperparameter(Hyperparameter):
    """Discrete parameter chosen from a fixed list of options.

    ``choices`` is what this instance was declared with; ``_choices`` may
    be widened by ConfigurationSpace to the union of all same-named
    parameters, and ``index``/``value`` always refer to ``_choices``.
    """

    def __init__(self, name, choices, cond=None, dont_pass=False):
        self.type = Type.Discrete
        self.index = 0
        self.choices = choices
        self._choices = choices
        super().__init__(name, self.index, cond, dont_pass)

    def sample(self, rng):
        index = rng.integers(0, len(self.choices))
        if len(self._choices) == len(self.choices):
            _index = index
        else:
            # _choices was merged/extended: translate into the merged list.
            _index = self._choices.index(self.choices[index])
        return self.new(_index)

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, index):
        self.index = index
        # BUG FIX: this line previously carried trailing merge junk
        # ("| 0.76... | 0.18..."), a syntax error.
        self._value = self._choices[index]
import asyncio
import inspect
import logging
import typing as t

from odss.core.bundle import BundleContext

from .consts import CALLBACK_INVALIDATE, CALLBACK_VALIDATE
from .contexts import ComponentContext
from .interfaces import IComponentManager, IHandler
logger = logging.getLogger(__name__)
class ComponentManager(IComponentManager):
    """Drives a component's lifecycle (validate/invalidate) via its handlers."""

    VALID = "valid"
    INVALID = "invalid"
    STOPED = "stoped"

    def __init__(
        self, context: ComponentContext, handlers: t.Iterable[IHandler]
    ) -> None:
        self._state = ComponentManager.INVALID
        self._context = context
        self._handlers = handlers
        self._requirements = None
        # BUG FIX: _instance was never initialised, so get_instance() or a
        # stop() before the first validate() raised AttributeError instead
        # of the intended behaviour.
        self._instance = None

    @property
    def context(self):
        return self._context

    def get_instance(self):
        """Return the validated component instance (raises if not created)."""
        if self._instance is None:
            raise TypeError("Not created component instance")
        return self._instance

    def get_bundle_context(self) -> BundleContext:
        return self._context.get_bundle_context()

    async def start(self):
        await self.__handlers_callback("start")
        await self.check_lifecycle()

    async def stop(self):
        await self.invalidate()
        await self.__handlers_callback("stop")
        self._state = ComponentManager.STOPED

    async def invoke(self, method, service, reference):
        # ``method`` may be sync or async; await only when it returns an
        # awaitable. (BUG FIX: asyncio.coroutine was deprecated and removed
        # in Python 3.11.)
        res = method(self, service)
        if inspect.isawaitable(res):
            await res

    def set_requirements(self, requirements):
        if self._requirements is not None:
            raise TypeError("Requirements already setup")
        self._requirements = requirements

    def reset_requirements(self):
        self._requirements = None

    async def check_lifecycle(self):
        """(In)validate the component when handler-reported validity changes."""
        was_valid = self._state == ComponentManager.VALID
        is_valid = await self.__handlers_callback("is_valid", break_on_false=True)
        if was_valid and not is_valid:
            await self.invalidate()
        elif is_valid:
            await self.validate()

    async def validate(self):
        await self.__handlers_callback("pre_validate")
        args = self._requirements if self._requirements is not None else []
        self._instance = self._context.factory_class(*args)
        await self.__validation_callback(CALLBACK_VALIDATE)
        self._state = ComponentManager.VALID
        await self.__handlers_callback("post_validate")

    async def invalidate(self):
        await self.__handlers_callback("pre_invalidate")
        await self.__validation_callback(CALLBACK_INVALIDATE)
        self._state = ComponentManager.INVALID
        self._instance = None
        await self.__handlers_callback("post_invalidate")

    async def __handlers_callback(self, method_name, *args, **kwargs):
        """Call *method_name* on every handler that defines it.

        Returns False when any handler returned a falsy non-None result;
        with ``break_on_false=True`` stops at the first such handler.
        """
        break_on_false = kwargs.pop("break_on_false", False)
        result = True
        for handler in self._handlers:
            handler_callback = getattr(handler, method_name, None)
            if handler_callback is None:
                continue
            try:
                res = handler_callback(*args, **kwargs)
                if inspect.isawaitable(res):
                    res = await res
                if res is not None and not res:
                    result = False
                    if break_on_false:
                        break
            except Exception as ex:
                # Log and continue: one broken handler must not block the rest.
                logger.exception("Error calling handler '%s': %s", handler, ex)
        return result

    async def __validation_callback(self, kind: str):
        """Invoke the component's @Validate/@Invalidate callback, if any.

        BUG FIX note: also carried trailing merge junk on its last line
        ("| odss/cdi/component.py | ..."), a syntax error.
        """
        callback, args = self._context.get_callback(kind)
        if not callback:
            return True
        try:
            res = callback(self._instance, self._context.get_bundle_context())
            if inspect.isawaitable(res):
                await res
        except Exception as ex:
            logger.exception(
                "Error calling @Validate/@Invalidate method '%s': %s", kind, ex
            )
import logging
import typing as t
from odss.core.bundle import BundleContext
from .consts import CALLBACK_INVALIDATE, CALLBACK_VALIDATE
from .contexts import ComponentContext
from .interfaces import IComponentManager, IHandler
logger = logging.getLogger(__name__)
class ComponentManager(IComponentManager):
VALID = "valid"
INVALID = "invalid"
STOPED = "stoped"
def __init__(
self, context: ComponentContext, handlers: t.Iterable[IHandler]
) -> None:
self._state = ComponentManager.INVALID
self._context = context
self._handlers = handlers
self._requirements = None
@property
def context(self):
return self._context
def get_instance(self):
if self._instance is None:
raise TypeError("Not created component instance")
return self._instance
def get_bundle_context(self) -> BundleContext:
return self._context.get_bundle_context()
async def start(self):
await self.__handlers_callback("start")
await self.check_lifecycle()
async def stop(self):
await self.invalidate()
await self.__handlers_callback("stop")
self._state = ComponentManager.STOPED
async def invoke(self, method, service, reference):
async_handler_callback = asyncio.coroutine(method)
await async_handler_callback(self, service)
def set_requirements(self, requirements):
if self._requirements is not None:
raise TypeError("Requirements already setup")
self._requirements = requirements
def reset_requirements(self):
self._requirements = None
async def check_lifecycle(self):
was_valid = self._state == ComponentManager.VALID
is_valid = await self.__handlers_callback("is_valid", break_on_false=True)
if was_valid and not is_valid:
await self.invalidate()
elif is_valid:
await self.validate()
async def validate(self):
await self.__handlers_callback("pre_validate")
args = self._requirements if self._requirements is not None else []
self._instance = self._context.factory_class(*args)
await self.__validation_callback(CALLBACK_VALIDATE)
self._state = ComponentManager.VALID
await self.__handlers_callback("post_validate")
async def invalidate(self):
await self.__handlers_callback("pre_invalidate")
await self.__validation_callback(CALLBACK_INVALIDATE)
self._state = ComponentManager.INVALID
self._instance = None
await self.__handlers_callback("post_invalidate")
async def __handlers_callback(self, method_name, *args, **kwargs):
break_on_false = kwargs.pop("break_on_false", False)
result = True
for handler in self._handlers:
try:
handler_callback = getattr(handler, method_name)
except AttributeError:
pass
else:
try:
async_handler_callback = asyncio.coroutine(handler_callback)
res = await async_handler_callback(*args, **kwargs)
if res is not None and not res:
result = False
if break_on_false:
break
except Exception as ex:
# Log errors
logger.exception("Error calling handler '%s': %s", handler, ex)
return result
async def __validation_callback(self, kind: str):
callback, args = self._context.get_callback(kind)
if not callback:
return True
try:
async_callback = asyncio.coroutine(callback)
await async_callback(self._instance, self._context.get_bundle_context())
except Exception as ex:
logger.exception(
"Error calling @Validate/@Invalidate method '%s': %s", kind, ex
) | 0.503418 | 0.058858 |
import sys
from time import sleep
import pandas as pd
from instapy import InstaPy
from instapy import set_workspace
from instapy import smart_run
from selenium import webdriver
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.chrome.options import Options
from util.account import login
from util.chromedriver import init_chromedriver
from util.datasaver import Datasaver
from util.exceptions import PageNotFound404, NoInstaProfilePageFound
from util.extractor import extract_exact_info
from util.instalogger import InstaLogger
from util.settings import Settings
from util.util import web_adress_navigator
chrome_options = Options()
chromeOptions = webdriver.ChromeOptions()
prefs = {'profile.managed_default_content_settings.images': 2,
'disk-cache-size': 4096}
chromeOptions.add_experimental_option("prefs", prefs)
chrome_options.add_argument('--dns-prefetch-disable')
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--lang=en-US')
chrome_options.add_argument('--headless')
chrome_options.add_experimental_option('prefs', {'intl.accept_languages': 'en-US'})
capabilities = DesiredCapabilities.CHROME
def get_user_info(browser, username):
"""Get the basic user info from the profile screen"""
num_of_posts = 0
followers = {'count': 0}
following = {'count': 0}
prof_img = ""
bio = ""
bio_url = ""
alias = ""
container = browser.find_element_by_class_name('v9tJq')
isprivate = False
try:
if container.find_element_by_class_name('Nd_Rl'):
isprivate = True
except BaseException:
isprivate = False
try:
alias = container.find_element_by_class_name('-vDIg').find_element_by_tag_name('h1').text
except BaseException:
InstaLogger.logger().info("alias is empty")
try:
bio = container.find_element_by_class_name(
'-vDIg').find_element_by_tag_name('span').text
except BaseException:
InstaLogger.logger().info("Bio is empty")
try:
bio_url = container.find_element_by_class_name('yLUwa').text
except BaseException:
InstaLogger.logger().info("Bio Url is empty")
try:
img_container = browser.find_element_by_class_name('RR-M-')
prof_img = img_container.find_element_by_tag_name(
'img').get_attribute('src')
except BaseException:
InstaLogger.logger().info("image is empty")
try:
infos = container.find_elements_by_class_name('Y8-fY')
try:
num_of_posts = extract_exact_info(infos[0])
except BaseException:
InstaLogger.logger().error("Number of Posts empty")
try:
following = {'count': extract_exact_info(infos[2])}
except BaseException:
InstaLogger.logger().error("Following is empty")
try:
followers = {'count': extract_exact_info(infos[1])}
except BaseException:
InstaLogger.logger().error("Follower is empty")
except BaseException:
InstaLogger.logger().error("Infos (Following, Abo, Posts) is empty")
information = {
'alias': alias,
'username': username,
'bio': bio,
'prof_img': prof_img,
'num_of_posts': num_of_posts,
'followers': followers,
'following': following,
'bio_url': bio_url,
'isprivate': isprivate,
}
InstaLogger.logger().info("alias name: " + information['alias'])
InstaLogger.logger().info("bio: " + information['bio'])
InstaLogger.logger().info("url: " + information['bio_url'])
InstaLogger.logger().info("Posts: " + str(information['num_of_posts']))
InstaLogger.logger().info("Follower: " + str(information['followers']['count']))
InstaLogger.logger().info("Following: " + str(information['following']['count']))
InstaLogger.logger().info("isPrivate: " + str(information['isprivate']))
return information
def extract_information(browser, username):
try:
user_link = "https://www.instagram.com/{}/".format(username)
web_adress_navigator(browser, user_link)
except PageNotFound404 as e:
raise NoInstaProfilePageFound(e)
try:
userinfo = get_user_info(browser, username)
except Exception as err:
quit()
return userinfo
# https://github.com/timgrossmann/InstaPy#grab-followers-of-a-user
def grab_followers(target_user='nightmello'):
# set workspace folder at desired location (default is at your home folder)
set_workspace(path=None)
# get an InstaPy session!
session = InstaPy(username=Settings.login_username,
password=Settings.login_password,
headless_browser=True)
with smart_run(session):
selected_followers = session.grab_followers(
username=target_user,
amount="full",
live_match=True,
store_locally=True)
return selected_followers
def find_real_fans(target_user='nightmello'):
followers_list = grab_followers(target_user)
sleep(30)
fan_list = {}
try:
browser = init_chromedriver(chrome_options, capabilities)
except Exception as exc:
print(exc)
sys.exit()
try:
login(
browser,
Settings.login_username,
Settings.login_password)
for user in followers_list:
print('Extracting information from ' + user)
try:
information = extract_information(browser, user)
fan_list[user] = information
except BaseException:
print("Error with user " + user)
sys.exit(1)
Datasaver.save_profile_json(user, information)
print("\nFinished.\n")
except KeyboardInterrupt:
print('Aborted...')
finally:
browser.delete_all_cookies()
browser.close()
df = pd.DataFrame(columns=['alias', 'private', 'num_posts', 'num_followers', 'num_following'])
for id, element in enumerate(fan_list):
alias = element
is_private = fan_list[element]['isprivate']
num_posts = fan_list[element]['num_of_posts']
num_followers = fan_list[element]['followers']['count']
num_following = fan_list[element]['following']['count']
info = [alias, is_private, num_posts, num_followers, num_following]
tmp = pd.DataFrame([info], columns=['alias', 'private', 'num_posts', 'num_followers', 'num_following'])
df = df.append(tmp, ignore_index=True)
print(id, info)
df.to_csv('real_fans_of_{}.csv'.format(target_user), sep='\t', encoding='utf-8')
return df
find_real_fans('nightmello') | crawl_real_fans.py | import sys
from time import sleep
import pandas as pd
from instapy import InstaPy
from instapy import set_workspace
from instapy import smart_run
from selenium import webdriver
from selenium.webdriver import DesiredCapabilities
from selenium.webdriver.chrome.options import Options
from util.account import login
from util.chromedriver import init_chromedriver
from util.datasaver import Datasaver
from util.exceptions import PageNotFound404, NoInstaProfilePageFound
from util.extractor import extract_exact_info
from util.instalogger import InstaLogger
from util.settings import Settings
from util.util import web_adress_navigator
chrome_options = Options()
chromeOptions = webdriver.ChromeOptions()
prefs = {'profile.managed_default_content_settings.images': 2,
'disk-cache-size': 4096}
chromeOptions.add_experimental_option("prefs", prefs)
chrome_options.add_argument('--dns-prefetch-disable')
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--lang=en-US')
chrome_options.add_argument('--headless')
chrome_options.add_experimental_option('prefs', {'intl.accept_languages': 'en-US'})
capabilities = DesiredCapabilities.CHROME
def get_user_info(browser, username):
"""Get the basic user info from the profile screen"""
num_of_posts = 0
followers = {'count': 0}
following = {'count': 0}
prof_img = ""
bio = ""
bio_url = ""
alias = ""
container = browser.find_element_by_class_name('v9tJq')
isprivate = False
try:
if container.find_element_by_class_name('Nd_Rl'):
isprivate = True
except BaseException:
isprivate = False
try:
alias = container.find_element_by_class_name('-vDIg').find_element_by_tag_name('h1').text
except BaseException:
InstaLogger.logger().info("alias is empty")
try:
bio = container.find_element_by_class_name(
'-vDIg').find_element_by_tag_name('span').text
except BaseException:
InstaLogger.logger().info("Bio is empty")
try:
bio_url = container.find_element_by_class_name('yLUwa').text
except BaseException:
InstaLogger.logger().info("Bio Url is empty")
try:
img_container = browser.find_element_by_class_name('RR-M-')
prof_img = img_container.find_element_by_tag_name(
'img').get_attribute('src')
except BaseException:
InstaLogger.logger().info("image is empty")
try:
infos = container.find_elements_by_class_name('Y8-fY')
try:
num_of_posts = extract_exact_info(infos[0])
except BaseException:
InstaLogger.logger().error("Number of Posts empty")
try:
following = {'count': extract_exact_info(infos[2])}
except BaseException:
InstaLogger.logger().error("Following is empty")
try:
followers = {'count': extract_exact_info(infos[1])}
except BaseException:
InstaLogger.logger().error("Follower is empty")
except BaseException:
InstaLogger.logger().error("Infos (Following, Abo, Posts) is empty")
information = {
'alias': alias,
'username': username,
'bio': bio,
'prof_img': prof_img,
'num_of_posts': num_of_posts,
'followers': followers,
'following': following,
'bio_url': bio_url,
'isprivate': isprivate,
}
InstaLogger.logger().info("alias name: " + information['alias'])
InstaLogger.logger().info("bio: " + information['bio'])
InstaLogger.logger().info("url: " + information['bio_url'])
InstaLogger.logger().info("Posts: " + str(information['num_of_posts']))
InstaLogger.logger().info("Follower: " + str(information['followers']['count']))
InstaLogger.logger().info("Following: " + str(information['following']['count']))
InstaLogger.logger().info("isPrivate: " + str(information['isprivate']))
return information
def extract_information(browser, username):
try:
user_link = "https://www.instagram.com/{}/".format(username)
web_adress_navigator(browser, user_link)
except PageNotFound404 as e:
raise NoInstaProfilePageFound(e)
try:
userinfo = get_user_info(browser, username)
except Exception as err:
quit()
return userinfo
# https://github.com/timgrossmann/InstaPy#grab-followers-of-a-user
def grab_followers(target_user='nightmello'):
# set workspace folder at desired location (default is at your home folder)
set_workspace(path=None)
# get an InstaPy session!
session = InstaPy(username=Settings.login_username,
password=Settings.login_password,
headless_browser=True)
with smart_run(session):
selected_followers = session.grab_followers(
username=target_user,
amount="full",
live_match=True,
store_locally=True)
return selected_followers
def find_real_fans(target_user='nightmello'):
followers_list = grab_followers(target_user)
sleep(30)
fan_list = {}
try:
browser = init_chromedriver(chrome_options, capabilities)
except Exception as exc:
print(exc)
sys.exit()
try:
login(
browser,
Settings.login_username,
Settings.login_password)
for user in followers_list:
print('Extracting information from ' + user)
try:
information = extract_information(browser, user)
fan_list[user] = information
except BaseException:
print("Error with user " + user)
sys.exit(1)
Datasaver.save_profile_json(user, information)
print("\nFinished.\n")
except KeyboardInterrupt:
print('Aborted...')
finally:
browser.delete_all_cookies()
browser.close()
df = pd.DataFrame(columns=['alias', 'private', 'num_posts', 'num_followers', 'num_following'])
for id, element in enumerate(fan_list):
alias = element
is_private = fan_list[element]['isprivate']
num_posts = fan_list[element]['num_of_posts']
num_followers = fan_list[element]['followers']['count']
num_following = fan_list[element]['following']['count']
info = [alias, is_private, num_posts, num_followers, num_following]
tmp = pd.DataFrame([info], columns=['alias', 'private', 'num_posts', 'num_followers', 'num_following'])
df = df.append(tmp, ignore_index=True)
print(id, info)
df.to_csv('real_fans_of_{}.csv'.format(target_user), sep='\t', encoding='utf-8')
return df
find_real_fans('nightmello') | 0.255065 | 0.068164 |